Skip to content

Commit 3f866d9

Browse files
Authored commit: Prepare pipelines for v222 and fix doctests (#300)
1 parent 0f3fa64 · commit 3f866d9

15 files changed

+55
-27
lines changed
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.

.github/workflows/ci.yml

+16-7
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ on:
1010
branches:
1111
- master
1212
- "release*"
13+
- "merge*"
1314

1415
concurrency:
1516
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
@@ -18,7 +19,7 @@ concurrency:
1819
env:
1920
PACKAGE_NAME: ansys-dpf-core
2021
MODULE: core
21-
ANSYS_VERSION: 221
22+
ANSYS_VERSION: 222
2223

2324
jobs:
2425
Build_and_Test:
@@ -34,7 +35,7 @@ jobs:
3435
- uses: actions/checkout@v3
3536

3637
- name: "Build Package"
37-
uses: pyansys/pydpf-actions/build_package@v2.1.1
38+
uses: pyansys/pydpf-actions/build_package@v2.2
3839
with:
3940
python-version: ${{ matrix.python-version }}
4041
ANSYS_VERSION: ${{env.ANSYS_VERSION}}
@@ -44,9 +45,10 @@ jobs:
4445
install_extras: plotting
4546

4647
- name: "Test Package"
47-
uses: pyansys/pydpf-actions/test_package@v2.1.1
48+
uses: pyansys/pydpf-actions/test_package@v2.2
4849
with:
4950
MODULE: ${{env.MODULE}}
51+
DEBUG: True
5052

5153
- name: 'Upload to PyPi'
5254
if: contains(github.ref, 'refs/tags')
@@ -62,10 +64,7 @@ jobs:
6264

6365
build_doc:
6466
name: Documentation
65-
runs-on: windows-2019
66-
67-
env:
68-
ANSYS_VERSION: 221
67+
runs-on: windows-latest
6968

7069
steps:
7170
- uses: actions/checkout@v3
@@ -87,6 +86,16 @@ jobs:
8786
- name: Set SERVER
8887
run: echo "SERVER=$env:AWP_ROOT221" | Out-File -FilePath $Env:GITHUB_ENV -Encoding utf-8 -Append
8988

89+
- name: "Install gatebin Windows"
90+
working-directory: .github\workflows
91+
run: pip install ansys_dpf_gatebin-0.1.dev1-py3-none-win_amd64.whl
92+
93+
- name: "Install pygate"
94+
working-directory: .github\workflows
95+
run: |
96+
pip install ansys_dpf_gate-0.1.dev1-py3-none-any.whl
97+
pip install ansys_grpc_dpf-0.5.dev1-py3-none-any.whl
98+
9099
- name: Install ansys-dpf-core
91100
shell: cmd
92101
run: |

ansys/dpf/core/core.py

+2
Original file line numberDiff line numberDiff line change
@@ -156,6 +156,8 @@ def download_file(server_file_path, to_client_file_path, server=None):
156156
>>> import os
157157
>>> file_path = dpf.upload_file_in_tmp_folder(examples.static_rst)
158158
>>> dpf.download_file(file_path, examples.static_rst)
159+
<BLANKLINE>
160+
Downloading...
159161
160162
"""
161163
base = BaseService(server, load_operators=False)

ansys/dpf/core/cyclic_support.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -192,7 +192,7 @@ def sectors_set_for_expansion(self, stage_num=0) -> Scoping:
192192
>>> multi_stage = examples.download_multi_stage_cyclic_result()
193193
>>> cyc_support = Model(multi_stage).metadata.result_info.cyclic_support
194194
>>> print(cyc_support.sectors_set_for_expansion(stage_num=1).ids)
195-
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
195+
[...0... 1... 2... 3... 4... 5... 6... 7... 8... 9... 10... 11]
196196
197197
"""
198198
sectors_for_expansion = self._api.cyclic_support_get_sectors_scoping(self, stage_num)
@@ -224,7 +224,7 @@ def expand_node_id(self, node_id, sectors=None, stage_num=0):
224224
>>> multi_stage = examples.download_multi_stage_cyclic_result()
225225
>>> cyc_support = Model(multi_stage).metadata.result_info.cyclic_support
226226
>>> print(cyc_support.expand_node_id(1,stage_num=0).ids)
227-
[1, 3596, 5816, 8036, 10256, 12476]
227+
[...1... 3596... 5816... 8036... 10256... 12476]
228228
229229
"""
230230
if sectors is None:
@@ -262,7 +262,7 @@ def expand_element_id(self, element_id, sectors=None, stage_num=0):
262262
>>> multi_stage = examples.download_multi_stage_cyclic_result()
263263
>>> cyc_support = Model(multi_stage).metadata.result_info.cyclic_support
264264
>>> print(cyc_support.expand_element_id(1,stage_num=0).ids)
265-
[1, 1558, 2533, 3508, 4483, 5458]
265+
[...1... 1558... 2533... 3508... 4483... 5458]
266266
267267
"""
268268
if sectors is None:

ansys/dpf/core/data_tree.py

+4
Original file line numberDiff line numberDiff line change
@@ -243,6 +243,8 @@ def write_to_txt(self, path=None):
243243
>>> import tempfile
244244
>>> import os
245245
>>> data_tree.write_to_txt(os.path.join(tempfile.mkdtemp(), "data_tree.txt"))
246+
<BLANKLINE>
247+
Downloading...
246248
247249
"""
248250
from ansys.dpf.core.operators.serialization import data_tree_to_txt
@@ -280,6 +282,8 @@ def write_to_json(self, path=None):
280282
>>> import tempfile
281283
>>> import os
282284
>>> data_tree.write_to_json(os.path.join(tempfile.mkdtemp(), "data_tree.json"))
285+
<BLANKLINE>
286+
Downloading...
283287
284288
"""
285289
from ansys.dpf.core.operators.serialization import data_tree_to_json

ansys/dpf/core/field_base.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -366,7 +366,8 @@ def append(self, data, scopingid):
366366
array([[1., 2., 3.],
367367
[1., 2., 3.]])
368368
>>> field.scoping.ids
369-
[1, 2]
369+
<BLANKLINE>
370+
...[1, 2]...
370371
371372
"""
372373
pass

ansys/dpf/core/operator_specification.py

+14-8
Original file line numberDiff line numberDiff line change
@@ -142,9 +142,11 @@ class ConfigOptionSpec:
142142
>>> operator = dpf.operators.math.add()
143143
>>> config_spec = operator.specification.config_specification
144144
>>> config_spec.keys()
145-
dict_keys(['binary_operation', 'inplace', 'mutex', 'num_threads', 'permissive', 'run_in_parallel', 'use_cache', 'work_by_index']) # noqa: E501
145+
dict_keys(['binary_operation', 'inplace', 'mutex', 'num_threads', 'permissive', \
146+
'run_in_parallel', 'use_cache', 'work_by_index'])
146147
>>> config_spec['inplace']
147-
ConfigOptionSpec(name='inplace', type_names=['bool'], default_value_str='false', document='The output is written over the input to save memory if this config is set to true.') # noqa: E501
148+
ConfigOptionSpec(name='inplace', type_names=['bool'], default_value_str='false', \
149+
document='The output is written over the input to save memory if this config is set to true.')
148150
149151
"""
150152

@@ -257,18 +259,22 @@ def properties(self):
257259
>>> from ansys.dpf import core as dpf
258260
>>> operator = dpf.operators.math.add()
259261
>>> operator.specification.properties
260-
{'plugin': 'core', 'category': 'math', 'user_name': '+', 'exposure': 'public'}
262+
{'category': 'math', 'exposure': 'public', 'plugin': 'core', 'user_name': '+'}
261263
"""
262264
if self._properties is None:
263-
self._properties = dict()
265+
temp_properties = dict()
264266
if self._internal_obj is not None:
265267
num_properties = self._api.operator_specification_get_num_properties(self)
266268
for i_property in range(num_properties):
267269
property_key = self._api.operator_specification_get_property_key(
268270
self, i_property
269271
)
270272
prop = self._api.operator_specification_get_properties(self, property_key)
271-
self._properties[property_key] = prop
273+
temp_properties[property_key] = prop
274+
# Reorder the properties for consistency
275+
self._properties = dict()
276+
for key in sorted(temp_properties.keys()):
277+
self._properties[key] = temp_properties[key]
272278
return self._properties
273279

274280
@property
@@ -284,7 +290,7 @@ def description(self) -> str:
284290
>>> from ansys.dpf import core as dpf
285291
>>> operator = dpf.operators.math.scale_by_field()
286292
>>> operator.specification.description
287-
"Scales a field (in 0) by a scalar field (in 1). If one field's scoping has 'overall' location, then these field's values are applied on the entire other field." # noqa: E501
293+
"Scales a field (in 0) by a scalar field (in 1). If one field's ... the entire other field."
288294
"""
289295
if self._internal_obj is not None:
290296
return self._api.operator_specification_get_description(self)
@@ -305,7 +311,7 @@ def inputs(self) -> dict:
305311
>>> 4 in operator.specification.inputs.keys()
306312
True
307313
>>> operator.specification.inputs[4]
308-
PinSpecification(name='data_sources', _type_names=['data_sources'], optional=False, document='result file path container, used if no streams are set', ellipsis=False) # noqa: E501
314+
PinSpecification(name='data_sources', _type_names=['data_sources'], ...set', ellipsis=False)
309315
"""
310316
if self._map_input_pin_spec is None:
311317
self._map_input_pin_spec = {}
@@ -325,7 +331,7 @@ def outputs(self) -> dict:
325331
>>> from ansys.dpf import core as dpf
326332
>>> operator = dpf.operators.mesh.mesh_provider()
327333
>>> operator.specification.outputs
328-
{0: PinSpecification(name='mesh', _type_names=['abstract_meshed_region'], optional=False, document='', ellipsis=False)} # noqa: E501
334+
{0: PinSpecification(name='mesh', _type_names=['abstract_meshed_region'], ...=False)}
329335
"""
330336
if self._map_output_pin_spec is None:
331337
self._map_output_pin_spec = {}

ansys/dpf/core/results.py

+8-4
Original file line numberDiff line numberDiff line change
@@ -285,7 +285,8 @@ def on_all_time_freqs(self):
285285
>>> model = dpf.Model(examples.msup_transient)
286286
>>> disp = model.results.displacement
287287
>>> disp.on_all_time_freqs.eval().get_label_scoping("time").ids
288-
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
288+
<BLANKLINE>
289+
...1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]...
289290
290291
"""
291292
self._time_scoping = list(
@@ -308,7 +309,8 @@ def on_first_time_freq(self):
308309
>>> model = dpf.Model(examples.msup_transient)
309310
>>> disp = model.results.displacement
310311
>>> disp.on_first_time_freq.eval().get_label_scoping("time").ids
311-
[1]
312+
<BLANKLINE>
313+
...[1]...
312314
313315
"""
314316
self._time_scoping = 1
@@ -329,7 +331,8 @@ def on_last_time_freq(self):
329331
>>> model = dpf.Model(examples.msup_transient)
330332
>>> disp = model.results.displacement
331333
>>> disp.on_last_time_freq.eval().get_label_scoping("time").ids
332-
[20]
334+
<BLANKLINE>
335+
...[20]...
333336
334337
"""
335338
self._time_scoping = len(
@@ -421,7 +424,8 @@ def split_by_body(self):
421424
>>> len(fc_disp)
422425
11
423426
>>> fc_disp.get_mat_scoping().ids
424-
[1, 5, 6, 10, 2, 7, 8, 13, 4, 12, 15]
427+
<BLANKLINE>
428+
...1, 5, 6, 10, 2, 7, 8, 13, 4, 12, 15]...
425429
>>> disp_mat_10 = fc_disp.get_field_by_mat_id(10)
426430
427431
"""

ansys/dpf/core/time_freq_support.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -95,7 +95,8 @@ def time_frequencies(self):
9595
>>> time_freq_support = model.metadata.time_freq_support
9696
>>> freq = time_freq_support.time_frequencies
9797
>>> freq.data
98-
array([0. , 0.019975 , 0.039975 , 0.059975 , 0.079975 ,
98+
<BLANKLINE>
99+
...rray([0. , 0.019975 , 0.039975 , 0.059975 , 0.079975 ,
99100
0.099975 , 0.119975 , 0.139975 , 0.159975 , 0.179975 ,
100101
0.199975 , 0.218975 , 0.238975 , 0.258975 , 0.278975 ,
101102
0.298975 , 0.318975 , 0.338975 , 0.358975 , 0.378975 ,

ansys/dpf/core/workflow.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -366,7 +366,7 @@ def add_operators(self, operators):
366366
>>> workflow = dpf.Workflow()
367367
>>> disp_op = dpf.Operator("U")
368368
>>> max_op = dpf.Operator("min_max")
369-
>>> workflow.add_operator([disp_op,max_op])
369+
>>> workflow.add_operators([disp_op, max_op])
370370
371371
"""
372372
if isinstance(operators, list):

pytest.ini

+2-1
Original file line numberDiff line numberDiff line change
@@ -3,4 +3,5 @@ filterwarnings =
33
ignore::FutureWarning
44
ignore::PendingDeprecationWarning
55
ignore::DeprecationWarning
6-
norecursedirs = *
6+
norecursedirs = *
7+
doctest_optionflags = NORMALIZE_WHITESPACE ELLIPSIS

tests/conftest.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
# currently running dpf on docker. Used for testing on CI
2121
running_docker = ansys.dpf.core.server_types.RUNNING_DOCKER["use_docker"]
2222

23-
local_test_repo = True
23+
local_test_repo = False
2424

2525
if os.name == "posix":
2626
import ssl

0 commit comments

Comments (0)