
Commit

install test
shinokumura committed Sep 15, 2023
1 parent 64157e0 commit c453b86
Showing 39 changed files with 2,846 additions and 1,550 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -167,3 +167,4 @@ requirements_old.txt
exfor_dictionary/
endflib_sql/
sql/
uninstall.txt
3 changes: 3 additions & 0 deletions .gitmodules
@@ -0,0 +1,3 @@
[submodule "submodules"]
path = submodules
url = https://github.com/shinokumura/exparser-submodule.git
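This adds https://github.com/shinokumura/exparser-submodule.git as a Git submodule mounted at submodules/. After cloning, it would need to be fetched with git submodule update --init (standard Git behaviour) for the submodules.utilities imports used later in this commit to resolve.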
1 change: 1 addition & 0 deletions VERSION
@@ -0,0 +1 @@
2.0.0beta
6 changes: 4 additions & 2 deletions app.py
@@ -23,7 +23,7 @@
app = dash.Dash(
__name__,
external_stylesheets=[dbc.themes.CERULEAN],
url_base_pathname="/dataexplorer/",
url_base_pathname="/dataexplorer2/",
suppress_callback_exceptions=True,
meta_tags=[
{
@@ -35,12 +35,13 @@
],
use_pages=True,
)

else:
app = dash.Dash(
__name__,
external_stylesheets=[dbc.themes.JOURNAL],
routes_pathname_prefix="/", # if Prod
requests_pathname_prefix="/dataexplorer/", # if Prod
requests_pathname_prefix="/dataexplorer2/", # if Prod
suppress_callback_exceptions=True,
meta_tags=[
{
@@ -55,6 +56,7 @@
server = app.server # for PROD/INT env
app.title = "LIBRARIES-2022 Data Explorer"

# print(app.config["url_base_pathname"])

app.layout = html.Div([dash.page_container])
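# Note (illustrative, not from this commit): url_base_pathname sets both the route
# and request prefixes at once, while the production branch above splits them so a
# reverse proxy can strip "/dataexplorer2/" before requests reach Flask, e.g.:
#
#   dash.Dash(__name__, url_base_pathname="/dataexplorer2/")    # dev: one prefix for both
#   dash.Dash(__name__, routes_pathname_prefix="/",             # prod: Flask routes at "/"
#             requests_pathname_prefix="/dataexplorer2/")       #       browser requests "/dataexplorer2/..."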

168 changes: 101 additions & 67 deletions common.py
@@ -14,20 +14,38 @@
import re
import dash
import dash_bootstrap_components as dbc
from dash import html, dcc
from dash import html
from dash.exceptions import PreventUpdate
import urllib.parse
# from urllib.parse import urlencode

# from app import app

from man import manual
from libraries.datahandle.list import (
PARTICLE,
ELEMS,
reaction_list,
elemtoz_nz,
read_mass_range,
)
from submodules.utilities.elem import ELEMS, elemtoz_nz
from submodules.utilities.mass import mass_range
from libraries.datahandle.list import reaction_list


# ------------------------------------------------------------------------------
# Incident particles
# ------------------------------------------------------------------------------

PARTICLE = ["N", "P", "D", "T", "A", "H", "G"]
PARTICLE_FY = ["N", "0", "P", "D", "T", "A", "H", "G"]


# ------------------------------------------------------------------------------
# Isomeric state
# ------------------------------------------------------------------------------

ISOMERIC = ["", "g", "m", "m2"]


# ------------------------------------------------------------------------------
# Name of libraries used in each app
# ------------------------------------------------------------------------------

url_basename = dash.get_relative_path("/")


toast = html.Div(
[
@@ -63,7 +81,7 @@
dbc.Col(
html.A(
[
html.Img(src=dash.get_asset_url("logo.png"), height="40px"),
html.Img(src=dash.get_asset_url("logo.png"), height="30px"),
],
href="https://nds.iaea.org",
)
@@ -77,16 +95,15 @@


page_urls = {
# "Libraries-2022": "/dataexplorer/",
"Libraries-2023": "/dataexplorer/reactions/xs",
"EXFOR": "/dataexplorer/exfor",
"Libraries-2023": url_basename + "reactions/xs",
"EXFOR": url_basename + "exfor",
}


lib_selections = [
{
"label": "Cross Section (XS)",
"value": "SIG",
"value": "XS",
},
{
"label": "Residual Production XS",
@@ -103,12 +120,12 @@


lib_page_urls = {
"SIG": "/dataexplorer/reactions/xs",
"Residual": "/dataexplorer/reactions/residual",
"FY": "/dataexplorer/reactions/fy",
"DA": "/dataexplorer/reactions/da",
"DE": "/dataexplorer/reactions/de",
"FIS": "/dataexplorer/reactions/fission",
"XS": url_basename + "reactions/xs",
"Residual": url_basename + "reactions/residual",
"FY": url_basename + "reactions/fy",
"DA": url_basename + "reactions/da",
"DE": url_basename + "reactions/de",
"FIS": url_basename + "reactions/fission",
}
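# Illustrative note (not from this commit): dash.get_relative_path prepends the app's
# configured pathname prefix, so these URL tables follow the app wherever it is mounted.
# Assuming requests_pathname_prefix="/dataexplorer2/":
#   dash.get_relative_path("/")      -> "/dataexplorer2/"
#   url_basename + "reactions/xs"    -> "/dataexplorer2/reactions/xs"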


@@ -130,10 +147,51 @@
libs_navbar = html.Div(
[
html.H5(html.B("IAEA Nuclear Data Explorer")),
html.P("Libraries 2023", style={"font-size": "medium"}),
html.P(
"Data have been renewed using a new exfor_parse",
style={"font-size": "smaller", "color": "gray"},
dbc.Row(
[
dbc.Col(
html.Div(
[
html.A(
[
html.Img(
src=dash.get_asset_url("logo.png"),
height="20px",
),
],
href="https://nds.iaea.org",
),
" Libraries 2023",
],
),
width=2,
style={"font-size": "medium"},
),
dbc.Col(
html.Div(
[
"Buildt with ",
html.A(
"endftables",
href="https://nds.iaea.org/talys/",
# className="text-dark",
),
" and ",
html.A(
"exforparser",
href="https://github.com/shinokumura/exforparser",
# className="text-dark",
),
".",
],
style={
"font-size": "smaller",
"color": "gray",
"text-align": "left",
},
),
),
]
),
]
)
@@ -182,47 +240,6 @@
)


def dict_merge(dicts_list):
d = {**dicts_list[0]}
for entry in dicts_list[1:]:
# print("entry:", entry)
for k, v in entry.items():
d[k] = (
[d[k], v]
if k in d and type(d[k]) != list
else [*d[k] + v]
if k in d
else v
)
return d


def data_length_unify(data_dict):
data_len = []
new_list = []
data_list = data_dict["data"]

for i in data_list:
data_len += [len(i)]

for l in range(len(data_len)):
if data_len[l] < max(data_len):
data_list[l] = data_list[l] * max(data_len)

## Check if list length are all same
it = iter(data_list)
the_next = len(next(it))
assert all(len(l) == the_next for l in it)

## overwrite the "data" block by extended list
data_dict["data"] = data_list

return data_dict


mass_range = read_mass_range()


def input_check(type, elem, mass, reaction):
if not type or not elem or not mass or not reaction:
# if any(not i for i in (type, elem, mass, reaction)):
@@ -290,4 +307,21 @@ def remove_query_parameter(url, param):
query_params = urllib.parse.parse_qs(url_parts.query)
query_params.pop(param, None)
updated_query = urllib.parse.urlencode(query_params, doseq=True)
return url_parts._replace(query=updated_query).geturl()
return url_parts._replace(query=updated_query).geturl()
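# Illustrative usage (not from this commit), with a hypothetical URL:
#   remove_query_parameter("https://example.org/exfor?entry=12345&page=2", "page")
#   -> "https://example.org/exfor?entry=12345"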


def limit_number_of_datapoints(points, df):
if points <= 100:
return df

elif 100 < points <= 1000:
nskip = int(points / 100)

return df.iloc[::nskip, :]

elif points > 1000:
nskip = int(points / 1000)
return df.iloc[::nskip, :]

else:
return df
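# Illustrative behaviour (not from this commit), assuming df is a pandas DataFrame
# with `points` rows:
#   points <= 100  -> df is returned unchanged
#   points == 500  -> nskip = 5, every 5th row is kept (about 100 rows)
#   points == 5000 -> nskip = 5, every 5th row is kept (about 1000 rows)
# e.g. limit_number_of_datapoints(len(df), df)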
53 changes: 24 additions & 29 deletions config.py
@@ -1,61 +1,56 @@
import sys
import os
import sqlalchemy as db
from sqlalchemy.orm import sessionmaker
import site

DEVENV = False
DEVENV = True

if DEVENV:
## Application file location
TOP_DIR = "/Users/okumuras/Dropbox/Development/dataexplorer2/"

## Dependent modules location
MODULES_DIR = "/Users/okumuras/Dropbox/Development/"

## Data directory linked from the code
DATA_DIR = "/Users/okumuras/Documents/nucleardata/EXFOR/"

## URLs to generate links inside the application
BASE_URL = "http://127.0.0.1:8050/dataexplorer"
# BASE_URL = "http://127.0.0.1:8050/dataexplorer2"
API_BASE_URL = "http://127.0.0.1:5000/"


else:
## Application file location
TOP_DIR = "/srv/www/dataexplorer2/"

## Dependent modules location
MODULES_DIR = "/srv/data/dataexplorer2/"

## Data directory linked from the code
DATA_DIR = "/srv/data/dataexplorer2/"

## URLs to generate links inside the application
BASE_URL = "https://int-nds.iaea.org/dataexplorer2"
API_BASE_URL = BASE_URL

# BASE_URL = "https://int-nds.iaea.org/dataexplorer2"
# API_BASE_URL = BASE_URL

MT_PATH_JSON = TOP_DIR + "libraries/datahandle/mf3.json"
MT50_PATH_JSON = TOP_DIR + "libraries/datahandle/mt50.json"
MAPPING_FILE = TOP_DIR + "exfor/datahandle/mapping.json"


EXFOR_DB = DATA_DIR + "exfor.sqlite"
ENDFTAB_DB = DATA_DIR + "endftables.sqlite"
MASTER_GIT_REPO_PATH = DATA_DIR + "exfor_master"
EXFORTABLES_PY_GIT_REPO_PATH = DATA_DIR + "exfortables_py"
## Package locations
SITE_DIR = site.getsitepackages()[0]
EXFOR_PARSER = os.path.join(SITE_DIR, "exforparser")
EXFOR_DICTIONARY = os.path.join(SITE_DIR, "exfor_dictionary")
ENDF_TABLES = os.path.join(SITE_DIR, "endftables_sql")
RIPL3 = os.path.join(SITE_DIR, "ripl3_json")


## Package locations
EXFOR_PARSER = "exforparser/"
EXFOR_DICTIONARY = "exfor_dictionary/"
ENDF_TABLES = "endftables_sql/"
RIPL3 = "ripl3_json/"
## Define the location of files used in the interface
MT_PATH_JSON = os.path.join(EXFOR_PARSER, "tabulated/mf3.json")
MT50_PATH_JSON = os.path.join(EXFOR_PARSER, "tabulated/mt50.json")

MAPPING_FILE = os.path.join(TOP_DIR, "exfor/datahandle/mapping.json")
MASS_RANGE_FILE = os.path.join(TOP_DIR, "submodules/utilities/A_min_max.txt")

sys.path.append(os.path.join(MODULES_DIR, EXFOR_PARSER))
sys.path.append(os.path.join(MODULES_DIR, EXFOR_DICTIONARY))
sys.path.append(os.path.join(MODULES_DIR, ENDF_TABLES))
## Define the location of data files
EXFOR_DB = os.path.join(DATA_DIR, "exfor.sqlite")
ENDFTAB_DB = os.path.join(DATA_DIR, "endftables.sqlite")
MASTER_GIT_REPO_PATH = os.path.join(DATA_DIR, "exfor_master")
EXFOR_JSON_GIT_REPO_PATH = os.path.join(DATA_DIR, "exfor_json")
EXFORTABLES_PY_GIT_REPO_PATH = os.path.join(DATA_DIR, "exfortables_py")
ENDFTABLES_PATH = "/Users/okumuras/Documents/nucleardata/libraries/libraries.all/"


MASTER_GIT_REPO_URL = "https://github.com/IAEA-NDS/exfor_master/"
@@ -86,7 +81,7 @@
"endftables": db.create_engine("sqlite:///" + ENDFTAB_DB),
}

engine = db.create_engine("sqlite:///" + EXFOR_DB) # , echo= True)
engine = db.create_engine("sqlite:///" + EXFOR_DB, echo=True)


session = sessionmaker(autocommit=False, autoflush=True, bind=engines["exfor"])
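# Illustrative usage (not from this commit): sessionmaker returns a factory, so
# calling session() yields a new Session bound to the EXFOR engine; the engines
# dict gives direct access to either SQLite file, e.g.
#   with engines["endftables"].connect() as con:
#       result = con.execute(db.text("SELECT 1"))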