@@ -27,75 +27,64 @@ jobs:
     DISPLAY: ':99.0'
     PYANSYS_OFF_SCREEN: True
     DPF_PORT: 32772
-    DPF_START_SERVER: False
   pool:
     vmImage: 'windows-2019'

-  steps:
-    - powershell: |
-        powershell .ci/install_opengl.ps1
-        .ci/setup_headless_display.sh
-        pip install -r .ci/requirements_test_xvfb.txt
-        python .ci/display_test.py
-      displayName: Install and start a virtual framebuffer
-
-    - task: UsePythonVersion@0
-      inputs:
-        versionSpec: $(python.version)
-        addToPath: true
-
-    - task: PipAuthenticate@1
-      inputs:
-        artifactFeeds: 'pyansys'
-        onlyAddExtraIndex: true
-
-    - script: |
-        pip install -r requirements_build.txt
-        python setup.py bdist_wheel
-        pip install --find-links=dist ansys_dpf_core
-        cd tests
-        python -c "from ansys.dpf import core; print(core.Report(gpu=False))"
-      displayName: Install ansys-dpf-core
-
-    - task: UniversalPackages@0
+  steps:
+    - template: templates\prepare-environment-windows.yml
+
+    - task: PublishBuildArtifacts@1
+      displayName: 'WHEEL: publish artifacts'
       inputs:
-        command: 'download'
-        downloadDirectory: '$(System.DefaultWorkingDirectory)'
-        feedsToUse: 'internal'
-        vstsFeed: '705e121a-9631-49f5-8aaf-c7142856f923'
-        vstsFeedPackage: 'f913c1d3-1fe4-404c-8c28-15a234e56803'
-        vstsPackageVersion: '21.1.4'
-
-    - script: |
-        @echo on
-        dir $(System.DefaultWorkingDirectory)
-        set THISDIR=$(System.DefaultWorkingDirectory)
-        set PATH=%THISDIR%\server\v211\tp\IntelMKL\2020.0.166\winx64\;%THISDIR%\server\v211\tp\hdf5\1.8.14\winx64\;%THISDIR%\server\v211\tp\CFFSDK\lib\winx64;%THISDIR%\res_files\;%PATH%
-        cd %THISDIR%\server\v211\aisol\bin\winx64
-        START /B Ans.Dpf.Grpc.exe --address 127.0.0.1 --port %DPF_PORT% > log.txt 2>&1
-        python -c "from ansys.dpf import core; core.connect_to_server(port=$(DPF_PORT)); print('Python Connected')"
-      displayName: Start DPF Server
+        PathtoPublish: '$(System.DefaultWorkingDirectory)\dist'
+        ArtifactName: 'ansys_dpf_core_wheel'
+        enabled: true

     - script: |
         pip install -r requirements_test.txt
-        cd tests
-        pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml
+        set THISDIR=$(System.DefaultWorkingDirectory)
+        cd tests
+        set AWP_ROOT212=%THISDIR%\server\v212
+        pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml --reruns 3
+
       displayName: Test Core API
+
+    - task: PublishTestResults@2
+      inputs:
+        testResultsFormat: 'JUnit'
+        testResultsFiles: 'junit/test-results.xml'
+        testRunTitle: 'windowsTests'
+        publishRunAttachments: true
+      condition: always()
+
+    - script: |
+        pip install twine
+        python setup.py sdist
+        twine upload --skip-existing dist/*
+      displayName: 'Upload to PyPi'
+      condition: contains(variables['Build.SourceBranch'], 'refs/tags/')
+      env:
+        TWINE_USERNAME: __token__
+        TWINE_PASSWORD: $(PYPI_TOKEN)
+        TWINE_REPOSITORY_URL: "https://upload.pypi.org/legacy/"
+

     - script: |
-        type $(System.DefaultWorkingDirectory)\server\v211\aisol\bin\winx64\log.txt
+        type $(System.DefaultWorkingDirectory)\server\v212\aisol\bin\winx64\log.txt
       displayName: 'Show DPF Server Logs'
-      condition: always()
+      condition: always()
+
+    - template: templates\kill-servers-windows.yml
+

 - job: Linux
   variables:
     python.version: '3.7'  # due to VTK 8.1.2 requirement for docbuild
     DISPLAY: ':99.0'
     PYANSYS_OFF_SCREEN: True
-    DPF_PORT: 32772
-    DPF_START_SERVER: False
-    DPF_IMAGE: docker.pkg.github.com/pyansys/dpf-core/dpf:v2021.1
-    DPF_DOCKER: True
+    DPF_PORT: 50055
+    TEMP: $(System.DefaultWorkingDirectory)/temp
+
   pool:
     vmImage: 'ubuntu-20.04'
   steps:
@@ -118,34 +107,139 @@ jobs:
     - script: |
         pip install -r requirements_build.txt
         python setup.py bdist_wheel
-        pip install --find-links=dist ansys_dpf_core
+        export WHEELNAME=`ls dist/*.whl`
+        echo ${WHEELNAME}
+        pip install ${WHEELNAME}
         cd tests
         python -c "from ansys.dpf import core; print(core.Report())"
       displayName: Install ansys-dpf-core

+    - task: UniversalPackages@0
+      inputs:
+        command: 'download'
+        downloadDirectory: '$(System.DefaultWorkingDirectory)'
+        feedsToUse: 'internal'
+        vstsFeed: '705e121a-9631-49f5-8aaf-c7142856f923'
+        vstsFeedPackage: 'dpf-linux'  # TODO: update hash of packages
+        vstsPackageVersion: '21.2.5'
+
+    - script: |
+        echo $0
+        if pgrep -x "Ans.Dpf.Grpc" > /dev/null
+        then
+        pkill -f Ans.Dpf.Grpc.exe
+        fi
+      displayName: 'Kill all servers'
+      condition: always()
+      continueOnError: true
+
     - script: |
-        set -ex
-        echo $(PAT) | docker login -u $(GH_USERNAME) --password-stdin docker.pkg.github.com
-        docker pull $(DPF_IMAGE)
-        docker run --restart always --name dpf -v `pwd`:/dpf -v /tmp:/dpf/_cache -p $(DPF_PORT):50054 $(DPF_IMAGE) > log.txt &
-        grep -q 'server started on ip' <(timeout 60 tail -f log.txt)
-        python -c "from ansys.dpf import core as dpf; dpf.connect_to_server(port=$(DPF_PORT)); print('Python Connected')"
-      displayName: Pull, launch, and validate DPF service
-
+        env
+      displayName: Display env
+
+    - script: |
+        echo $0
+        export THISDIR=${SYSTEM_DEFAULTWORKINGDIRECTORY}
+        echo ${THISDIR}
+        export AWP_ROOT212=${THISDIR}/server/v212
+        cd ${THISDIR}/server/v212/aisol/bin/linx64
+        pwd
+        chmod 755 Ans.Dpf.Grpc.sh
+        chmod 755 Ans.Dpf.Grpc.exe
+        ./Ans.Dpf.Grpc.sh --port 50054 & > log.txt
+        export DPF_IP=$(hostname -i)
+        python -c "from ansys.dpf import core; core.connect_to_server(ip='${DPF_IP}', port=50054); print('Python Connected')"
+      displayName: Start DPF Server
+
     - script: |
         pip install -r requirements_test.txt
-        pip install pytest-azurepipelines
+        pip install pytest-azurepipelines
+        export AWP_ROOT212=${SYSTEM_DEFAULTWORKINGDIRECTORY}/server/v212
         cd tests
-        pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml --cov-report=html
+        export DPF_IP=$(hostname -i)
+        pytest -v --junitxml=junit/test-results.xml --cov ansys.dpf.core --cov-report=xml --reruns 3
+        export PATH=`pwd`
+        echo ${PATH}
       displayName: Test Core API
+
+    - task: PublishTestResults@2
+      inputs:
+        testResultsFormat: 'JUnit'
+        testResultsFiles: 'junit/test-results.xml'
+        testRunTitle: 'linuxTests'
+        publishRunAttachments: true
+        searchFolder: 'tests/'
+      condition: always()
+
+
+    - script: |
+        echo $0
+        if pgrep -x "Ans.Dpf.Grpc" > /dev/null
+        then
+        pkill -f Ans.Dpf.Grpc.exe
+        fi
+      displayName: 'Kill all servers'
+      condition: always()
+      continueOnError: true

+
+- job: DocumentationWindows
+  variables:
+    python.version: '3.8'
+    DISPLAY: ':99.0'
+    PYANSYS_OFF_SCREEN: True
+    DPF_PORT: 32772
+    GH_DOC_BRANCH: 'gh-pages'
+  pool:
+    vmImage: 'windows-2019'
+
+  steps:
+    - template: templates\prepare-environment-windows.yml
+
     - script: |
-        pip install twine
-        python setup.py sdist
-        twine upload --skip-existing dist/*
-      displayName: 'Upload to PyPi'
+        pip install -r requirements_docs.txt
+        cd .ci
+        dir
+        set THISDIR=$(System.DefaultWorkingDirectory)
+        set AWP_ROOT212=%THISDIR%\server\v212
+        ECHO %AWP_ROOT212%
+        build_doc.bat
+      displayName: Build Documentation
+
+    - task: ArchiveFiles@2
+      inputs:
+        rootFolderOrFile: '$(System.DefaultWorkingDirectory)\docs\build'
+        includeRootFolder: false
+        archiveType: 'zip'
+        archiveFile: '$(System.DefaultWorkingDirectory)\docs\archive\doc-ansys-dpf-core.zip'
+        replaceExistingArchive: true
+      displayName: 'DOCUMENTATION: zip artifacts'
+
+    - task: PublishBuildArtifacts@1
+      displayName: 'DOCUMENTATION: publish artifacts'
+      inputs:
+        PathtoPublish: '$(System.DefaultWorkingDirectory)\docs\archive'
+        ArtifactName: doc-ansys-dpf-core
+        enabled: true
+
+    - powershell: |
+        git init
+        git checkout -b $(GH_DOC_BRANCH)
+        git config --global user.name "pyansys-ci-bot"
+        git config --global user.email "$(GH_EMAIL)"
+        New-Item -ItemType file .nojekyll
+        git add .
+        git commit -m "Documentation generated by $(Build.DefinitionName)"
+      displayName: "Init git and add docs"
+      workingDirectory: docs\build\html
+
+    - script: |
+        git remote add origin https://$(GH_PAT)@github.com/pyansys/DPF-Core-docs
+        git push -u origin $(GH_DOC_BRANCH) --force
+      displayName: "Publish GitHub Pages merge commit"
+      workingDirectory: docs\build\html
       condition: contains(variables['Build.SourceBranch'], 'refs/tags/')
-      env:
-        TWINE_USERNAME: __token__
-        TWINE_PASSWORD: $(PYPI_TOKEN)
-        TWINE_REPOSITORY_URL: "https://upload.pypi.org/legacy/"
+
+    - template: templates\kill-servers-windows.yml
+
+
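For reference, the "Python Connected" check that both jobs run after launching Ans.Dpf.Grpc amounts to the following sketch. It assumes ansys-dpf-core is installed and a DPF server is already listening; the 127.0.0.1 address and port 50054 are illustrative placeholders matching the Linux job, not required values.

    # Minimal local equivalent of the pipeline's connection check.
    # Assumes Ans.Dpf.Grpc was started beforehand on the given ip/port (placeholders).
    from ansys.dpf import core

    core.connect_to_server(ip="127.0.0.1", port=50054)  # same call the pipeline scripts make
    print("Python Connected")
    print(core.Report())  # environment report, as printed by the install step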