Skip to content

Commit 01a55b1

Browse files
committed
Merged PR 236741: move from codedv
move from codedv Related work items: #308291
1 parent 7dcf603 commit 01a55b1

File tree

148 files changed

+47052
-16
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

148 files changed

+47052
-16
lines changed

.gitignore

+52
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,52 @@
1+
2+
3+
#########################
4+
## DPF
5+
#########################
6+
7+
# cache and pyc
8+
__pycache__
9+
.pytest_cache
10+
.spyproject
11+
12+
#tests
13+
__pycache__
14+
/tests/.pytest_cache
15+
16+
#ansys/dpf
17+
/ansys/dpf/__pycache__
18+
19+
#ansys/dpf/core
20+
/ansys/dpf/core/__pycache__
21+
22+
#operators
23+
/ansys/dpf/core/operators/__pycache__
24+
25+
#other
26+
/perso
27+
.ipynb_checkpoints/
28+
29+
# compiled documentation
30+
docs/build
31+
docs/source/examples
32+
33+
# pip files
34+
*.egg-info
35+
build/
36+
dist/
37+
38+
# emacs
39+
flycheck*
40+
*~
41+
\#*\#
42+
.\#*
43+
44+
# docker bin
45+
docker/v211
46+
47+
# pytest -coverage
48+
.coverage
49+
test-output.xml
50+
51+
# downloaded files
52+
ansys/dpf/core/examples/_cache/

README.md

+13-16
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,17 @@
1-
# Introduction
2-
TODO: Give a short introduction of your project. Let this section explain the objectives or the motivation behind this project.
1+
# DPF
2+
ANSYS Data Processing Framework.
33

4-
# Getting Started
5-
TODO: Guide users through getting your code up and running on their own system. In this section you can talk about:
6-
1. Installation process
7-
2. Software dependencies
8-
3. Latest releases
9-
4. API references
4+
## Disclaimer
105

11-
# Build and Test
12-
TODO: Describe and show how to build your code and run the tests.
6+
This API is currently a work in progress - things will break and change!
137

14-
# Contribute
15-
TODO: Explain how other users and developers can contribute to make your code better.
168

17-
If you want to learn more about creating good readme files then refer the following [guidelines](https://docs.microsoft.com/en-us/azure/devops/repos/git/create-a-readme?view=azure-devops). You can also seek inspiration from the below readme files:
18-
- [ASP.NET Core](https://github.com/aspnet/Home)
19-
- [Visual Studio Code](https://github.com/Microsoft/vscode)
20-
- [Chakra Core](https://github.com/Microsoft/ChakraCore)
9+
## Get Started
10+
11+
Clone or copy this directory and then install using:
12+
13+
```
14+
pip install . --user
15+
```
16+
17+
See the example scripts in the examples folder for some basic examples. More will be added later.

ansys/dpf/core/__init__.py

+81
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,81 @@
1+
import os
import socket

from ansys.dpf.core._version import __version__

# Environment variables for the hosted jupyter machines on pyansys.com:
# default the ANSYS install path, DPF server binary and test-file location
# when they are not already set.
if 'jupyter' in socket.gethostname():
    if 'ANSYS_PATH' not in os.environ:
        os.environ['ANSYS_PATH'] = '/mnt/ansys_inc/v212/'
    if 'DPF_PATH' not in os.environ:
        os.environ['DPF_PATH'] = '/mnt/ansys_inc/dpf/bin_v%s/Ans.dpf.core.Grpc.exe' % __version__
    if 'AWP_UNIT_TEST_FILES' not in os.environ:
        os.environ['AWP_UNIT_TEST_FILES'] = '/mnt/ansys_inc/dpf/test_files/'

from ansys.dpf.core.misc import module_exists, Report
from ansys.dpf.core.dpf_operator import Operator, Config
from ansys.dpf.core.model import Model
from ansys.dpf.core.field import Field, FieldDefinition, Dimensionnality
from ansys.dpf.core.fields_container import FieldsContainer
from ansys.dpf.core.meshes_container import MeshesContainer
from ansys.dpf.core.scopings_container import ScopingsContainer
from ansys.dpf.core.server import (start_local_server, _global_server,
                                   connect_to_server)
from ansys.dpf.core.data_sources import DataSources
from ansys.dpf.core.scoping import Scoping
from ansys.dpf.core.common import types, natures, locations, shell_layers
from ansys.dpf.core.core import BaseService, load_library, download_file, upload_file, upload_file_in_tmp_folder
from ansys.dpf.core.time_freq_support import TimeFreqSupport
from ansys.dpf.core.operators_helper import sum, to_nodal, norm, eqv, element_dot, sqr
from ansys.dpf.core.meshed_region import MeshedRegion
from ansys.dpf.core.result_info import ResultInfo
from ansys.dpf.core.collection import Collection
from ansys.dpf.core.workflow import Workflow
from ansys.dpf.core import operators
from ansys.dpf.core.fields_factory import field_from_array

# for matplotlib
# solves "QApplication: invalid style override passed, ignoring it."
os.environ['QT_STYLE_OVERRIDE'] = ''

# Setup data directory
USER_DATA_PATH = None
EXAMPLES_PATH = None
if os.environ.get('DPF_DOCKER', False):  # pragma: no cover
    # Running DPF within docker (likely for CI)
    # path must be relative to DPF directory
    #
    # assumes the following docker mount:
    # -v /tmp:/dpf/_cache
    EXAMPLES_PATH = '/tmp'
else:
    try:
        import appdirs
        USER_DATA_PATH = appdirs.user_data_dir('ansys-dpf-core')
        # exist_ok avoids the exists()/makedirs() race of the original code.
        os.makedirs(USER_DATA_PATH, exist_ok=True)

        EXAMPLES_PATH = os.path.join(USER_DATA_PATH, 'examples')
        os.makedirs(EXAMPLES_PATH, exist_ok=True)
    except Exception:  # pragma: no cover
        # Best-effort only: the examples cache is optional (appdirs may be
        # missing or the data directory unwritable).  Deliberately narrow to
        # Exception so KeyboardInterrupt/SystemExit still propagate.
        pass


# Configure PyVista's ``rcParams`` for dpf
if module_exists("pyvista"):
    import pyvista as pv
    pv.rcParams['interactive'] = True
    pv.rcParams["cmap"] = "jet"
    pv.rcParams["font"]["family"] = "courier"
    pv.rcParams["title"] = "DPF"


# Active DPF gRPC server (presumably set by ansys.dpf.core.server — TODO confirm).
SERVER = None


def has_local_server():
    """Returns True when a local DPF gRPC server has been created"""
    return SERVER is not None


# Server instances created in this session (appended to elsewhere, e.g. aeneid.py).
_server_instances = []

ansys/dpf/core/_version.py

+7
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
"""Version for ansys-dpf-core"""
2+
# major, minor, patch
3+
version_info = 0, 2, 1
4+
5+
# Nice string for the version
6+
__version__ = '.'.join(map(str, version_info))
7+
__ansys_version__ = "212"

ansys/dpf/core/aeneid.py

+44
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
from ansys.dpf.core.server import DpfServer

"""Aeneid specific functions and classes"""


def start_server_using_service_manager():  # pragma: no cover
    """Start a DPF server through the Aeneid service manager.

    Posts the "dpf" job definition to the service manager REST API, opens a
    gRPC channel to the started service (with a header interceptor carrying
    the service name), and stores the resulting ``DpfJob`` in
    ``dpf.core.SERVER`` / ``dpf.core._server_instances``.

    NOTE(review): this function references ``dpf``, ``requests``, ``grpc``
    and ``LOCALHOST``, none of which are imported in this module — calling
    it as-is would raise NameError.  Confirm the intended imports.
    """
    # Interceptor support is required to tag requests with the service name.
    if dpf.core.module_exists("grpc_interceptor_headers"):
        import grpc_interceptor_headers
        from grpc_interceptor_headers.header_manipulator_client_interceptor import header_adder_interceptor
    else:
        raise ValueError('Module grpc_interceptor_headers is missing to use service manager, please install it using pip install grpc_interceptor_headers')

    service_manager_url = f"http://{LOCALHOST}:8089/v1"

    # Fetch the stored "dpf" job definition and submit it as a new job.
    definition = requests.get(url=service_manager_url + "/definitions/dpf").json()
    rsp = requests.post(url=service_manager_url + "/jobs", json=definition)
    job = rsp.json()

    # The DPF service is the first service of the first task of the job.
    dpf_task = job['taskGroups'][0]['tasks'][0]
    dpf_service = dpf_task['services'][0]
    dpf_service_name = dpf_service['name']
    dpf_url = f"{dpf_service['host']}:{dpf_service['port']}"

    # Wrap the channel so every call carries the 'service-name' header.
    channel = grpc.insecure_channel(dpf_url)
    header_adder = header_adder_interceptor('service-name', dpf_service_name)
    intercept_channel = grpc.intercept_channel(channel, header_adder)
    dpf.core.SERVER = DpfJob(service_manager_url, dpf_service_name, intercept_channel)

    dpf.core._server_instances.append(dpf.core.SERVER)
29+
30+
31+
class DpfJob(DpfServer):  # pragma: no cover
    """DPF server running as a job under the Aeneid service manager.

    Parameters
    ----------
    service_manager_url : str
        Base URL of the service manager REST API.
    job_name : str
        Name of the service-manager job backing this server.
    channel : grpc.Channel
        Open (intercepted) gRPC channel to the DPF service.
    """

    def __init__(self, service_manager_url, job_name, channel):
        # NOTE(review): DpfServer.__init__ is deliberately not called here
        # (matching the original); confirm the base class tolerates this.
        self.sm_url = service_manager_url
        self.job_name = job_name
        # Fixed: original `super().channel = channel` raises AttributeError —
        # attribute assignment through a super() proxy is not supported.
        self.channel = channel

    def shutdown(self):
        """Delete this job from the service manager, stopping the server."""
        # NOTE(review): `requests` is not imported in this module — confirm.
        requests.delete(url=f'{self.sm_url}/jobs/{self.job_name}')

    def __del__(self):
        # Best-effort shutdown; never let destructor errors escape.
        try:
            self.shutdown()
        except Exception:
            pass

ansys/dpf/core/available_result.py

+77
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
from warnings import warn
2+
from ansys.grpc.dpf import available_result_pb2, base_pb2
3+
from ansys.dpf.core.common import remove_spaces
4+
5+
class AvailableResult:
    """A result available in a result file, requestable via an operator.

    Wraps a gRPC ``AvailableResult`` message and exposes its fields as
    read-only properties.

    Parameters
    ----------
    availableresult : available_result_pb2.AvailableResult
        The gRPC AvailableResult message to wrap.
    """

    def __init__(self, availableresult):
        """Initialize the AvailableResult with an availableResult message"""
        self._message = availableresult

    def __str__(self):
        txt = self.name + '\n' +\
            'Operator name: "%s"\n' % self.operator_name +\
            'Number of components: %d\n' % self.n_components +\
            'Dimensionality: %s\n' % self.dimensionality +\
            'Homogeneity: %s\n' % self.homogeneity
        if self.unit:
            txt += 'Units: %s\n' % self.unit
        return txt

    @property
    def name(self):
        """Result name (physics name with spaces removed)."""
        return remove_spaces(self._message.physicsname)

    @property
    def n_components(self):
        """Number of components of the results"""
        return self._message.ncomp

    @property
    def dimensionality(self):
        """Dimensionality nature of the result (vector, scalar, tensor...)"""
        return base_pb2.Nature.Name(self._message.dimensionality).lower()

    @property
    def homogeneity(self):
        """Homogeneity of the result, or '' when the enum value is unknown."""
        try:
            homogeneity = self._message.homogeneity
            return available_result_pb2.Homogeneity.Name(homogeneity).lower()
        except ValueError as exception:
            # Unknown enum value: warn and degrade gracefully.
            warn(str(exception))
            return ''

    @property
    def unit(self):
        """Unit of the result (lower case)."""
        return self._message.unit.lower()

    @property
    def operator_name(self):
        """Name of the corresponding operator."""
        return self._message.name

    @property
    def sub_results(self):
        """List of sub-results.

        Each entry is a dict with ``"name"``, ``"operator name"`` and
        ``"description"`` keys.  Purely numeric sub-result names are
        prefixed with ``"principal"``.
        """
        sub_results = []
        for sub_res in self._message.sub_res:
            try:
                # A name that parses as an integer denotes a principal
                # component; anything else is kept verbatim.
                int(sub_res.name)
                name = "principal" + sub_res.name
            except (ValueError, TypeError):
                name = sub_res.name
            sub_results.append({"name": name,
                                "operator name": sub_res.op_name,
                                "description": sub_res.description})
        return sub_results

0 commit comments

Comments
 (0)