Skip to content

Commit

Permalink
Major refactoring
Browse files Browse the repository at this point in the history
  • Loading branch information
jkarns275 committed May 29, 2019
1 parent c759285 commit 73a7de9
Show file tree
Hide file tree
Showing 60 changed files with 8,136 additions and 7,540 deletions.
22 changes: 13 additions & 9 deletions src/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,24 +6,25 @@
import sys
from multiprocessing import freeze_support

from PyQt5 import QtWidgets, QtCore
from PyQt5 import QtCore, QtWidgets

from metadata.config import Config
# This should be imported first: it verifies the correct python version is running, and moves the
# current working directory to wherever it ought to be. Importing things for their side-effects
# is probably bad practice, but so is using python.
from startup import verify_internet_connection_and_obtain_api_key

from utils.log import TextReceiver
from windows.main_window import MainWindow
from worker.hapi_thread import HapiThread
from worker.hapi_worker import HapiWorker
from worker.work_request import WorkRequest
from worker.hapi_thread import HapiThread
from utils.metadata.config import Config
from utils.log import TextReceiver


# Create the data folder if it doesn't exist.
if not os.path.exists(Config.data_folder):
os.makedirs(Config.data_folder)


# This is not necessary right now but will be helpful if the behavior of
# the QApplication needs to be modified.
# class App(QtWidgets.QApplication):
Expand All @@ -36,6 +37,7 @@ def main():
"""
if len(sys.argv) > 1 and sys.argv[1] == 'test':
import test

test.run_tests()
return 0

Expand All @@ -58,7 +60,7 @@ def main():
if not verify_internet_connection_and_obtain_api_key():
return 0

from utils.metadata.molecule import MoleculeMeta
from metadata.molecule_meta import MoleculeMeta

WorkRequest.start_work_process()

Expand All @@ -67,7 +69,8 @@ def main():
# start.start() # When a start_hapi request is sent, it starts automatically.

_ = MoleculeMeta(0)
from utils.metadata.xsc import CrossSectionMeta
from metadata.xsc_meta import CrossSectionMeta

# If the cache is expired, download a list of the cross section meta file.
# This also populates the CrossSectionMeta.molecule_metas field.
_ = CrossSectionMeta(0)
Expand All @@ -82,17 +85,18 @@ def main():
_qt_result = app.exec_()

TextReceiver.redirect_close()
close = HapiWorker(WorkRequest.END_WORK_PROCESS, {}, callback=None)
close = HapiWorker(WorkRequest.END_WORK_PROCESS, { }, callback = None)
close.safe_exit()
WorkRequest.WORKER.process.join()
HapiThread.kill_all()
sys.exit(0)
return 0


if __name__ == '__main__':
freeze_support()
# try:
# main()
#except Exception as error:
# except Exception as error:
# debug(error)
main()
4 changes: 3 additions & 1 deletion src/utils/graphing/band.py → src/data_structures/bands.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
from typing import *


class Band:
    """Holds the plottable data for one band: x/y series plus a string identifier.

    The ``nu`` and ``sw`` argument names suggest wavenumber and intensity data
    (HITRAN conventions) — confirm against callers; this class itself only
    stores them as generic x/y values.
    """

    def __init__(self, nu: List[float], sw: List[float], band_id: str):
        """
        :param nu: values plotted on the x axis.
        :param sw: values plotted on the y axis.
        :param band_id: label identifying this band.
        """
        self.x, self.y, self.band_id = nu, sw, band_id


class Bands:

def __init__(self, bands: List[Band], table_name: str = ''):
Expand All @@ -14,4 +16,4 @@ def __init__(self, bands: List[Band], table_name: str = ''):
self.use_scatter_plot = True

def add_band(self, band: Band):
self.bands.append(band)
self.bands.append(band)
62 changes: 39 additions & 23 deletions src/utils/cache.py → src/data_structures/cache.py
Original file line number Diff line number Diff line change
@@ -1,39 +1,49 @@
import errno
import json
import time
from datetime import timedelta
from typing import Callable, Any, Union, Optional

import json
from typing import Any, Callable, Optional, Union

from metadata.config import Config
from utils.log import err_log
from utils.metadata.config import Config


class Cache:
"""
This is a small utility class for caching web results. When caching for the first time, it will download the a file
by calling the web_fetch_routine (that is, you still have to write that part yourself). This result (if everything
went smoothly) will then be stored in a file and given a timestamp that represents the unix-timestamp when that
This is a small utility class for caching web results. When caching for the first time,
it will download a file
by calling the web_fetch_routine (that is, you still have to write that part yourself). This
result (if everything
went smoothly) will then be stored in a file and given a timestamp that represents the
unix-timestamp when that
cache item should be re-downloaded.
This is not meant to be an extremely time-precise cache. It is going to be used for things that should be
This is not meant to be an extremely time-precise cache. It is going to be used for things
that should be
re-downloaded daily.
"""

##
# Where all cached files are stored. This is to be a subdirectory in the data directory as defined in
# Where all cached files are stored. This is to be a subdirectory in the data directory as
# defined in
# the use Config.
CACHE_ROOT = ".cache"

def __init__(self, path: str, web_fetch_routine: Callable[[], Union[str, bytes, Any]], lifetime: timedelta):
def __init__(self, path: str, web_fetch_routine: Callable[[], Union[str, bytes, Any]],
lifetime: timedelta):
"""
:param path: The path, starting from the cache root, of the file that the cache should be located or stored.
:param web_fetch_routine: A function which will return a string on success, and something else otherwise. The
something else can be retrieved using the err function. web_fetch_routine should not throw any
exceptions, and should return a string or bytes on success, and anything else will be considered a
:param path: The path, starting from the cache root, of the file that the cache should be
located or stored.
:param web_fetch_routine: A function which will return a string on success, and something
else otherwise. The
something else can be retrieved using the err function. web_fetch_routine should
not throw any
exceptions, and should return a string or bytes on success, and anything else
will be considered a
failure.
:param lifetime: A duration, after which, the cached results should be thrown out and re-retrieved. Therefore it
:param lifetime: A duration, after which, the cached results should be thrown out and
re-retrieved. Therefore it
is the lifetime of the cached data!
"""

Expand All @@ -49,9 +59,11 @@ def __init__(self, path: str, web_fetch_routine: Callable[[], Union[str, bytes,

def __load_from_file(self) -> bool:
"""
Attempts to load the cache from a file. If the containing directories don't exist they're created, and then the
Attempts to load the cache from a file. If the containing directories don't exist they're
created, and then the
__load_from_web function will be called.
:return: Returns False if the function failed to load the file. This either means it doesn't exist or something
:return: Returns False if the function failed to load the file. This either means it
doesn't exist or something
weird happened
"""
import os.path
Expand All @@ -69,7 +81,8 @@ def __load_from_file(self) -> bool:
with open(self.path, 'r') as file:
text = file.read() # This reads whole contents of the file.
parsed = json.loads(text)
# This means the lifetime of the cache has expired (parsed['timestamp'] contains the unix timestamp of
# This means the lifetime of the cache has expired (parsed['timestamp'] contains
# the unix timestamp of
# when the file was written added to the number of seconds before expiration).
if int(time.time()) > parsed['timestamp']:
return False
Expand Down Expand Up @@ -99,8 +112,8 @@ def __load_from_web(self) -> bool:
with open(self.path, 'w+') as file:
file.write(json.dumps({
'timestamp': int(time.time()) + self.lifetime,
'cached': self.cached
}))
'cached': self.cached
}))
except Exception as e:
print('Failed to write to CrossSectionMeta cache: {}'.format(str(e)))
return True
Expand All @@ -119,7 +132,8 @@ def ok(self) -> bool:

def data(self) -> str:
"""
:return: The cached data, if it exists. If `self.ok()` returns true this should return a str. Otherwise, it
:return: The cached data, if it exists. If `self.ok()` returns true this should return a
str. Otherwise, it
will return None.
"""
return self.cached
Expand All @@ -130,12 +144,14 @@ class JsonCache(Cache):
Just like `Cache`, except it parses the data as if it were JSON before returning it.
"""

def __init__(self, path: str, web_fetch_routine: Callable[[], Union[str, Any]], lifetime: timedelta):
def __init__(self, path: str, web_fetch_routine: Callable[[], Union[str, Any]],
lifetime: timedelta):
Cache.__init__(self, path, web_fetch_routine, lifetime)

def data(self) -> Any:
"""
:return: The cached data as parsed JSON. Will return None if there is no cached data (i.e. something went wrong)
:return: The cached data as parsed JSON. Will return None if there is no cached data (
i.e. something went wrong)
or if the data is invalid JSON.
"""
try:
Expand Down
11 changes: 6 additions & 5 deletions src/utils/lines.py → src/data_structures/lines.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from typing import *

from utils.metadata.config import *
from metadata.config import *
from worker.hapi_worker import HapiWorker


Expand Down Expand Up @@ -50,7 +50,7 @@ def __init__(self, table: Dict[str, Any]):
if self.last_page * self.page_len != self.table_len:
self.last_page_len = self.table_len - self.last_page * self.page_len
self.last_page += 1

self.page_number = 1
self.param_order = table['header']['order']

Expand All @@ -71,18 +71,18 @@ def get_line(self, line_number: int) -> Optional['Line']:
"""
"""
line = []

# index of the `line_number`th field of page
index = line_number + ((self.page_number - 1) * self.page_len)
for param in self.param_order:
line.append(self.data[param][index])
l = Line(line_number + (self.page_number - 1) * self.page_len, line, self)
return l


def set_page(self, page_number):
    """Select the current page of the paginated table.

    ``get_line`` computes absolute row indices from ``self.page_number``
    (pages appear to be 1-based: the initial value is 1 and index math
    uses ``page_number - 1``).

    :param page_number: the page to make current.
    """
    self.page_number = page_number


class Line:
def __init__(self, line_index: int, line: List[Union[int, float, str]], lines: 'Lines'):
self.line_index = line_index
Expand All @@ -93,7 +93,8 @@ def __init__(self, line_index: int, line: List[Union[int, float, str]], lines: '

def update_nth_field(self, field_index: int, new_value: Union[int, float, str]):
"""
*Given params: (self), int field_index, and a new values : [int,float], updates a field for the Line class.*
*Given params: (self), int field_index, and a new values : [int,float], updates a field
for the Line class.*
"""
self.data[self.param_order[field_index]][self.line_index] = new_value

Expand Down
28 changes: 17 additions & 11 deletions src/utils/xsc.py → src/data_structures/xsc.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
from typing import List, Optional, Any, Dict, Tuple, Iterable
import sys
from typing import Iterable, List, Optional, Tuple

from metadata.xsc_meta import CrossSectionMeta

from utils.metadata.xsc import CrossSectionMeta

class CrossSection:
"""
Represents a parsed cross section. This is just a data class so it doesn't get represented by a dictionary.
Represents a parsed cross section. This is just a data class so it doesn't get represented by
a dictionary.
"""

def __init__(self, nu: Iterable[float], abscoef: Iterable[float], step: float, numin: float, numax: float,
def __init__(self, nu: Iterable[float], abscoef: Iterable[float], step: float, numin: float,
numax: float,
molecule: str, len: int, pressure: float, temp: float):
self.nu = tuple(nu)
self.abscoef = tuple(abscoef)
Expand Down Expand Up @@ -55,7 +57,9 @@ def parse(data: str) -> Optional[CrossSection]:
step = (max_wavenum - numin) / float(num_points)
x = list(map(lambda n: numin + float(n) * step, range(0, num_points)))

return CrossSection(x, y, step, numin, max_wavenum, molecule, num_points, pressure, temperature)
return CrossSection(x, y, step, numin, max_wavenum, molecule, num_points, pressure,
temperature)


class CrossSectionFilter:

Expand All @@ -71,7 +75,7 @@ def get_cross_sections(self) -> List[str]:
if self.molecule_id not in CrossSectionMeta.molecule_metas:
return []
return [item['filename']
for item in CrossSectionMeta.molecule_metas[self.molecule_id]
for item in CrossSectionMeta.molecule_metas[self.molecule_id]
if self.xsc_is_conformant(item)]

def xsc_is_conformant(self, xsc) -> bool:
Expand All @@ -81,7 +85,9 @@ def xsc_is_conformant(self, xsc) -> bool:
:return: True if the supplied cross-section satisfies all of the
conditions of this filter, otherwise false.
"""
return (self.pressure_range is None or (self.pressure_range[0] < xsc['pressure'] < self.pressure_range[1])) \
and (self.temp_range is None or (self.temp_range[0] < xsc['temperature'] < self.temp_range[1])) \
and (self.wn_range is None or (xsc['numin'] < self.wn_range[0] and xsc['numax'] > self.wn_range[1]))

return (self.pressure_range is None or (
self.pressure_range[0] < xsc['pressure'] < self.pressure_range[1])) \
and (self.temp_range is None or (
self.temp_range[0] < xsc['temperature'] < self.temp_range[1])) \
and (self.wn_range is None or (
xsc['numin'] < self.wn_range[0] and xsc['numax'] > self.wn_range[1]))
File renamed without changes.
9 changes: 9 additions & 0 deletions src/graphing/graph_type.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from enum import Enum


class GraphType(Enum):
    """Enumerates the kinds of plots the application can produce.

    Member values are kept stable (0-4) since they may be compared or
    persisted by callers.
    """

    ABSORPTION_SPECTRUM = 0
    TRANSMITTANCE_SPECTRUM = 1
    RADIANCE_SPECTRUM = 2
    ABSORPTION_COEFFICIENT = 3
    BANDS = 4
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
from PyQt5.QtChart import QLineSeries, QAbstractAxis, QChart, QScatterSeries
from PyQt5.QtGui import QPen, QBrush, QColor
from PyQt5.QtChart import QAbstractAxis, QChart, QLineSeries, QScatterSeries
from PyQt5.QtGui import QBrush, QColor, QPen


class HapiSeries:
Expand All @@ -9,15 +9,17 @@ def create_series(self):
return QScatterSeries()
return QLineSeries()

def __init__(self, x = (), y = (), use_scatter_plot = True, name = ""):
def __init__(self, x=(), y=(), use_scatter_plot=True, name=""):
self.use_scatter_plot = use_scatter_plot
if len(x) >= 2:
self.step = x[1] - x[0]
self.series = self.create_series()
for i in range(0, len(x)):
# Since qt won't graph a chart using a log scale if there is a negative or zero value,
# Since qt won't graph a chart using a log scale if there is a negative or zero
# value,
# make sure everything is > 0.
# This shouldn't be a problem since all of the graph types work with positive quantities.
# This shouldn't be a problem since all of the graph types work with positive
# quantities.
if y[i] < 1e-138:
self.append(x[i], 1e-138)
else:
Expand Down Expand Up @@ -47,7 +49,8 @@ def isVisible(self) -> bool:

def internal_copy(self):
"""
Makes a copy of the underlying series. This is needed because after removing a series from a chart,
Makes a copy of the underlying series. This is needed because after removing a series
from a chart,
Qt deallocates the QLineSeries.
"""
new_series = self.create_series()
Expand Down
Loading

0 comments on commit 73a7de9

Please sign in to comment.