 trigger:
 - main

-jobs:
-- job: 'EvalChanges'
-  displayName: 'Analyze changed files to determine which job to run'
-  pool:
-    vmImage: 'macOS-10.15'
-  steps:
-  # We want to enforce the following rules for PRs:
-  # * if all modifications are to README.md
-  #     no testing is needed
-  # * if there are modifications to docs/* or to any code
-  #     then docs need to be built to verify consistency
-  # * if there are modifications to notebooks/* or to any code
-  #     then notebooks need to be run to verify consistency
-  # * for any code changes (or changes to metadata files)
-  #     linting and testing should be run
-  # For a PR build, HEAD will be the merge commit, and we want to diff against the base branch,
-  # which will be the first parent: HEAD^
-  # (For non-PR changes, we will always perform all CI tasks)
-  - powershell: |
-      if ($env:BUILD_REASON -eq 'PullRequest') {
-        $editedFiles = git diff HEAD^ --name-only
-        $editedFiles # echo edited files to enable easier debugging
-        $codeChanges = $false
-        $docChanges = $false
-        $nbChanges = $false
-        $changeType = "none"
-        foreach ($file in $editedFiles) {
-          switch -Wildcard ($file) {
-            "README.md" { Continue }
-            "econml/_version.py" { Continue }
-            "prototypes/*" { Continue }
-            "images/*" { Continue }
-            "doc/*" { $docChanges = $true; Continue }
-            "notebooks/*" { $nbChanges = $true; Continue }
-            default { $codeChanges = $true; Continue }
-          }
-        }
-      }
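-      # Publish the flags as multi-job output variables; the downstream jobs below gate on them
-      # via dependencies.EvalChanges.outputs['output.<name>'] conditions (compared against the
-      # string 'True', since the PowerShell booleans are written out as text here)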
-      Write-Host "##vso[task.setvariable variable=buildDocs;isOutput=true]$(($env:BUILD_REASON -ne 'PullRequest') -or ($docChanges -or $codeChanges))"
-      Write-Host "##vso[task.setvariable variable=buildNbs;isOutput=true]$(($env:BUILD_REASON -ne 'PullRequest') -or ($nbChanges -or $codeChanges))"
-      Write-Host "##vso[task.setvariable variable=testCode;isOutput=true]$(($env:BUILD_REASON -ne 'PullRequest') -or $codeChanges)"
-    name: output
-    displayName: 'Determine type of code change'
-
-- template: azure-pipelines-steps.yml
-  parameters:
-    versions: ['3.6']
-    images: ['ubuntu-18.04']
-    package: '-e .[all]'
-    job:
-      job: 'Docs'
-      displayName: 'Build documentation'
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.buildDocs'], 'True')
-      steps:
-      - script: 'sudo apt-get -yq install graphviz'
-        displayName: 'Install graphviz'
-
-      - script: 'pip install sklearn-contrib-lightning'
-        displayName: 'Install lightning'
-
-      - script: 'pip install git+https://github.com/slundberg/shap.git@d1d2700acc0259f211934373826d5ff71ad514de'
-        displayName: 'Install specific version of shap'
-
-      - script: 'pip install sphinx sphinx_rtd_theme'
-        displayName: 'Install sphinx'
-
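-      # -W promotes Sphinx warnings to errors so that documentation problems fail the build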
-      - script: 'python setup.py build_sphinx -W'
-        displayName: 'Build documentation'
-
-      - publish: 'build/sphinx/html'
-        artifact: 'Documentation'
-        displayName: 'Publish documentation as artifact'
-
-      - script: 'python setup.py build_sphinx -b doctest'
-        displayName: 'Run doctests'
-

+jobs:
 - template: azure-pipelines-steps.yml
   parameters:
     versions: ['3.8']
     images: ['ubuntu-18.04']
     package: '-e .[tf,plt]'
     job:
       job: 'Notebooks_cust'
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.buildNbs'], 'True')
       displayName: 'Notebooks (Customer Solutions)'
       steps:
       # Work around https://github.com/pypa/pip/issues/9542
       - script: 'pip install -U numpy~=1.21.0'
         displayName: 'Upgrade numpy'

-      - script: 'pip install pytest pytest-runner jupyter jupyter-client nbconvert nbformat seaborn xgboost tqdm && pip list && python setup.py pytest'
+      # shap 0.39 and sklearn 1.0 interact badly in these notebooks
+      # shap 0.40 has a bug in waterfall (https://github.com/slundberg/shap/issues/2283) that breaks our main tests
+      # but fixes the interaction here...
+      - script: 'pip install -U shap~=0.40.0'
+        displayName: 'Upgrade shap'
+
+      - script: 'pip install pytest pytest-runner jupyter jupyter-client nbconvert nbformat seaborn xgboost tqdm && pip freeze && python setup.py pytest'
         displayName: 'Unit tests'
         env:
           PYTEST_ADDOPTS: '-m "notebook"'
     package: '-e .[tf,plt]'
     job:
       job: 'Notebooks_noncust'
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.buildNbs'], 'True')
       displayName: 'Notebooks (except Customer Solutions)'
       steps:
       # Work around https://github.com/pypa/pip/issues/9542
@@ -145,162 +71,3 @@ jobs:
           testResultsFiles: '**/test-results.xml'
           testRunTitle: 'Notebooks'
         condition: succeededOrFailed()
-
-
-# - job: 'AutoML'
-#   dependsOn: 'EvalChanges'
-#   condition: eq(dependencies.EvalChanges.outputs['output.testCode'], 'True')
-#   variables:
-#     python.version: '3.6'
-#   pool:
-#     vmImage: 'ubuntu-18.04'
-#   steps:
-#   - template: azure-pipelines-steps.yml
-#     parameters:
-#       body:
-#       - task: AzureCLI@2
-#         displayName: 'AutoML tests'
-#         inputs:
-#           azureSubscription: 'automl'
-#           scriptLocation: 'inlineScript'
-#           scriptType: 'pscore'
-#           powerShellIgnoreLASTEXITCODE: '' # string for now due to https://github.com/microsoft/azure-pipelines-tasks/issues/12266
-#           inlineScript: |
-#             $env:SUBSCRIPTION_ID = az account show --query id -o tsv
-#             python setup.py pytest
-#         env:
-#           WORKSPACE_NAME: 'testWorkspace'
-#           RESOURCE_GROUP: 'testingAutoMLEconML'
-#           PYTEST_ADDOPTS: '-m "automl" -n 0'
-#           COVERAGE_PROCESS_START: 'setup.cfg'
-
-#       - task: PublishTestResults@2
-#         displayName: 'Publish Test Results **/test-results.xml'
-#         inputs:
-#           testResultsFiles: '**/test-results.xml'
-#           testRunTitle: 'AutoML'
-#         condition: succeededOrFailed()
-#       package: '.[automl]'
-
-- template: azure-pipelines-steps.yml
-  parameters:
-    versions: ['3.8']
-    images: ['macOS-10.15']
-    job:
-      job: 'Linting'
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.testCode'], 'True')
-      steps:
-      - script: 'pip install pycodestyle && pycodestyle econml'
-        failOnStderr: true
-        displayName: Linting
-
-- template: azure-pipelines-steps.yml
-  parameters:
-    package: '-e .[tf,plt]'
-    job:
-      job: Tests_main
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.testCode'], 'True')
-      displayName: 'Run tests (main)'
-      steps:
-      - script: 'pip install pytest pytest-runner && python setup.py pytest'
-        displayName: 'Unit tests'
-        env:
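-          # -m selects tests by pytest marker so each job runs a disjoint slice of the suite;
-          # -n sets the number of parallel workers (pytest-xdist, presumably pulled in via the test dependencies)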
-          PYTEST_ADDOPTS: '-m "not (notebook or automl or dml or serial or cate_api)" -n 2'
-          COVERAGE_PROCESS_START: 'setup.cfg'
-      - task: PublishTestResults@2
-        displayName: 'Publish Test Results **/test-results.xml'
-        inputs:
-          testResultsFiles: '**/test-results.xml'
-          testRunTitle: 'Python $(python.version), image $(imageName)'
-        condition: succeededOrFailed()
-
-      - task: PublishCodeCoverageResults@1
-        displayName: 'Publish Code Coverage Results'
-        inputs:
-          codeCoverageTool: Cobertura
-          summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
-
-- template: azure-pipelines-steps.yml
-  parameters:
-    package: '-e .[tf,plt]'
-    job:
-      job: Tests_dml
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.testCode'], 'True')
-      displayName: 'Run tests (DML)'
-      steps:
-      - script: 'pip install pytest pytest-runner && python setup.py pytest'
-        displayName: 'Unit tests'
-        env:
-          PYTEST_ADDOPTS: '-m "dml"'
-          COVERAGE_PROCESS_START: 'setup.cfg'
-      - task: PublishTestResults@2
-        displayName: 'Publish Test Results **/test-results.xml'
-        inputs:
-          testResultsFiles: '**/test-results.xml'
-          testRunTitle: 'Python $(python.version), image $(imageName)'
-        condition: succeededOrFailed()
-
-      - task: PublishCodeCoverageResults@1
-        displayName: 'Publish Code Coverage Results'
-        inputs:
-          codeCoverageTool: Cobertura
-          summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
-
-- template: azure-pipelines-steps.yml
-  parameters:
-    package: '-e .[tf,plt]'
-    job:
-      job: Tests_serial
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.testCode'], 'True')
-      displayName: 'Run tests (Serial)'
-      steps:
-      - script: 'pip install pytest pytest-runner && python setup.py pytest'
-        displayName: 'Unit tests'
-        env:
-          PYTEST_ADDOPTS: '-m "serial" -n 1'
-          COVERAGE_PROCESS_START: 'setup.cfg'
-      - task: PublishTestResults@2
-        displayName: 'Publish Test Results **/test-results.xml'
-        inputs:
-          testResultsFiles: '**/test-results.xml'
-          testRunTitle: 'Python $(python.version), image $(imageName)'
-        condition: succeededOrFailed()
-
-      - task: PublishCodeCoverageResults@1
-        displayName: 'Publish Code Coverage Results'
-        inputs:
-          codeCoverageTool: Cobertura
-          summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'
-
-- template: azure-pipelines-steps.yml
-  parameters:
-    package: '-e .[tf,plt]'
-    job:
-      job: Tests_CATE_API
-      dependsOn: 'EvalChanges'
-      condition: eq(dependencies.EvalChanges.outputs['output.testCode'], 'True')
-      displayName: 'Run tests (Other)'
-      steps:
-      - script: 'pip install pytest pytest-runner'
-        displayName: 'Install pytest'
-      - script: 'python setup.py pytest'
-        displayName: 'CATE Unit tests'
-        env:
-          PYTEST_ADDOPTS: '-m "cate_api" -n auto'
-          COVERAGE_PROCESS_START: 'setup.cfg'
-      - task: PublishTestResults@2
-        displayName: 'Publish Test Results **/test-results.xml'
-        inputs:
-          testResultsFiles: '**/test-results.xml'
-          testRunTitle: 'Python $(python.version), image $(imageName)'
-        condition: succeededOrFailed()
-
-      - task: PublishCodeCoverageResults@1
-        displayName: 'Publish Code Coverage Results'
-        inputs:
-          codeCoverageTool: Cobertura
-          summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/coverage.xml'