diff --git a/.coverage b/.coverage
index dee71bc..6463e1a 100644
Binary files a/.coverage and b/.coverage differ
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
new file mode 100644
index 0000000..8d96444
--- /dev/null
+++ b/.devcontainer/devcontainer.json
@@ -0,0 +1,26 @@
+// For format details, see https://aka.ms/devcontainer.json. For config options, see the
+// README at: https://github.com/devcontainers/templates/tree/main/src/docker-existing-dockerfile
+{
+    "name": "Existing Dockerfile",
+    "build": {
+        // Sets the run context to one level up instead of the .devcontainer folder.
+        "context": "..",
+        // Update the 'dockerFile' property if you aren't using the standard 'Dockerfile' filename.
+        "dockerfile": "../Dockerfile"
+    }
+
+    // Features to add to the dev container. More info: https://containers.dev/features.
+    // "features": {},
+
+    // Use 'forwardPorts' to make a list of ports inside the container available locally.
+    // "forwardPorts": [],
+
+    // Uncomment the next line to run commands after the container is created.
+    // "postCreateCommand": "cat /etc/os-release",
+
+    // Configure tool-specific properties.
+    // "customizations": {},
+
+    // Uncomment to connect as an existing user other than the container default. More info: https://aka.ms/dev-containers-non-root.
+    // "remoteUser": "devcontainer"
+}
diff --git a/coverage.xml b/coverage.xml
index eeb38d0..386fc1c 100644
--- a/coverage.xml
+++ b/coverage.xml
[auto-generated coverage report regenerated: the <source> path changes from /home/jvivian/covid19-drDFM/covid19_drdfm to /Users/jvivian/Library/CloudStorage/GoogleDrive-jtvivian@gmail.com/My Drive/projects/covid19-drDFM/covid19_drdfm; the remaining hunk is refreshed per-line coverage entries]
diff --git a/covid19_drdfm/cli.py b/covid19_drdfm/cli.py
index 01eecf1..de04df8 100644
--- a/covid19_drdfm/cli.py
+++ b/covid19_drdfm/cli.py
@@ -9,6 +9,7 @@ Process data and generate parquet DataFrame
 - `c19_dfm process ./outfile.xlsx`
 """
+
 from pathlib import Path
 
 import typer
"Demand_7": "Employment2", + "Supply_1": "GDP", + "Supply_2": "UI", + "Supply_3": "PartR", + "Supply_4": "UR", + "Supply_5": "RPFI", + "Supply_6": "FixAss", + "Supply_7": "Prod", + "Monetary_1": "CPI", + "Monetary_2": "CPIU", + "Monetary_3": "PCE", + "Monetary_4": "PCEC", + "Monetary_5": "TBill1mo", + "Monetary_6": "TBill6mo", + "Monetary_7": "TBill1yr", + "Monetary_8": "TBill5yr", + "Monetary_9": "TBill10yr", + "Monetary_10": "TBill30yr", + "Monetary_11": "FFR", +} + +FACTORS = { + "Cases1": ("Global", "Pandemic"), + "Cases2": ("Global", "Pandemic"), + "Cases3": ("Global", "Pandemic"), + "Cases4": ("Global", "Pandemic"), + "Cases5": ("Global", "Pandemic"), + "Hosp1": ("Global", "Pandemic"), + "Hosp2": ("Global", "Pandemic"), + "Deaths1": ("Global", "Pandemic"), + "Deaths2": ("Global", "Pandemic"), + "Deaths3": ("Global", "Pandemic"), + "Deaths4": ("Global", "Pandemic"), + "Deaths5": ("Global", "Pandemic"), + "Vax1": ("Global", "Response"), + "Vax2": ("Global", "Response"), + "Vax3": ("Global", "Response"), + "Gather1": ("Global", "Response"), + "Gather2": ("Global", "Response"), + "Gather3": ("Global", "Response"), + "Gather4": ("Global", "Response"), + "SaH": ("Global", "Response"), + "Curfew": ("Global", "Response"), + "Mask1": ("Global", "Response"), + "Mask2": ("Global", "Response"), + "School": ("Global", "Response"), + "ARP": ("Global", "Response"), + "PPP": ("Global", "Response"), + "CARES": ("Global", "Response"), + "School": ("Global", "Response"), + "School": ("Global", "Response"), + "Cons1": ("Global", "Consumption"), + "Cons2": ("Global", "Consumption"), + "Cons3": ("Global", "Consumption"), + "Cons4": ("Global", "Consumption"), + "Cons5": ("Global", "Consumption"), + "Employment1": ("Global", "Employment"), + "Employment2": ("Global", "Employment"), + "UI": ("Global", "Employment"), + "PartR": ("Global", "Employment"), + "UR": ("Global", "Employment"), + "CPI": ("Global", "Inflation"), + "CPIU": ("Global", "Inflation"), + "PCE": ("Global", "Inflation"), + "PCEC": ("Global", "Inflation"), + "RPFI": ("Global", "Uncat"), + "FixAss": ("Global", "Uncat"), + "Prod": ("Global", "Uncat"), + "GDP": ("Global", "Uncat"), + "TBill1mo": ("Global", "Uncat"), + "TBill6mo": ("Global", "Uncat"), + "TBill1yr": ("Global", "Uncat"), + "TBill5yr": ("Global", "Uncat"), + "TBill10yr": ("Global", "Uncat"), + "TBill30yr": ("Global", "Uncat"), + "FFR": ("Global", "Uncat"), +} + +DIFF_COLS = [ + "Cases1", + "Cases2", + "Cases3", + "Cases4", + "Cases5", + "Hosp1", + "Hosp2", + "Deaths1", + "Deaths2", + "Deaths3", + "Deaths4", + "Deaths5", +] + +LOG_DIFF_COLS = [ + "Cons1", + "Cons2", + "Cons3", + "Cons4", + "Cons5", + "Employment1", + "Employment2", + "CPI", + "CPIU", + "PCE", + "PCEC", + "RPFI", + "FixAss", + "Prod", + "GDP", + "UI", +] + +UNMODIFIED_COLS = set(FACTORS).difference(DIFF_COLS).difference(LOG_DIFF_COLS) diff --git a/covid19_drdfm/dfm.py b/covid19_drdfm/dfm.py index 310a572..9fc880e 100644 --- a/covid19_drdfm/dfm.py +++ b/covid19_drdfm/dfm.py @@ -1,19 +1,18 @@ -# %% """Module for Dynamic Factor Model specification Main command to run model - `c19_dfm run` """ + from dataclasses import dataclass from pathlib import Path import pandas as pd import statsmodels.api as sm from rich import print as pprint -from sklearn.preprocessing import MinMaxScaler from statsmodels.tsa.stattools import adfuller -from covid19_drdfm.processing import adjust_pandemic_response, get_factors +from covid19_drdfm.constants import FACTORS @dataclass @@ -44,39 +43,30 @@ def state_process(df: pd.DataFrame, state: str) 
diff --git a/covid19_drdfm/dfm.py b/covid19_drdfm/dfm.py
index 310a572..9fc880e 100644
--- a/covid19_drdfm/dfm.py
+++ b/covid19_drdfm/dfm.py
@@ -1,19 +1,18 @@
-# %%
 """Module for Dynamic Factor Model specification
 
 Main command to run model
     - `c19_dfm run`
 """
+
 from dataclasses import dataclass
 from pathlib import Path
 
 import pandas as pd
 import statsmodels.api as sm
 from rich import print as pprint
-from sklearn.preprocessing import MinMaxScaler
 from statsmodels.tsa.stattools import adfuller
 
-from covid19_drdfm.processing import adjust_pandemic_response, get_factors
+from covid19_drdfm.constants import FACTORS
 
 
 @dataclass
@@ -44,39 +43,30 @@ def state_process(df: pd.DataFrame, state: str) -> pd.DataFrame:
     const_cols = [x for x in df.columns if is_constant(df[x])]
     pprint(f"Constant Columns...dropping\n{const_cols}")
     df = df.drop(columns=const_cols)
-    df = adjust_pandemic_response(df)
-    return normalize(df)
+    return df
 
 
-def normalize(df: pd.DataFrame) -> pd.DataFrame:
-    """Normalize data and make stationary - scaling for post-DFM Synthetic Control Model
+def get_nonstationary_columns(df: pd.DataFrame) -> list[str]:
+    """Run the augmented Dickey-Fuller (ADF) test on each column and report failures
 
     Args:
-        df (pd.DataFrame): State data, pre-normalization
+        df (pd.DataFrame): Input DataFrame
 
     Returns:
-        pd.DataFrame: Normalized and stationary DataFrame
+        list[str]: List of columns that fail the ADF test
     """
-    df = df.drop(columns=["Time"]) if "Time" in df.columns else df
-    # Normalize data
-    scaler = MinMaxScaler()
-    norm_df = pd.DataFrame(scaler.fit_transform(df), columns=df.columns) * 100
-    # stationary_df = norm_df.diff()
-    stationary_df = norm_df.diff().drop(0, axis=0)  #! Dropping first after diff
-    # stationary_df = stationary_df.fillna(0)
     non_stationary_columns = []
-    for col in stationary_df.columns:
-        result = adfuller(stationary_df[col])
+    for col in df.columns:
+        result = adfuller(df[col])
         p_value = result[1]
         if p_value > 0.25:
             non_stationary_columns.append(col)
-    pprint("Columns that fail the ADF test (non-stationary):", non_stationary_columns)
-    return stationary_df
+    pprint(f"Columns that fail the ADF test (non-stationary)\n{non_stationary_columns}")
+    return non_stationary_columns
 
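For reference, `adfuller` returns a tuple whose second element is the p-value checked above; a toy check on a synthetic random walk (illustrative only, using the same 0.25 cutoff as the function):

import numpy as np
from statsmodels.tsa.stattools import adfuller

rng = np.random.default_rng(0)
random_walk = np.cumsum(rng.normal(size=200))  # non-stationary by construction
p_value = adfuller(random_walk)[1]
if p_value > 0.25:  # same threshold used in get_nonstationary_columns
    print("non-stationary")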
-def run_model(df: pd.DataFrame, state: str, outdir: Path) -> sm.tsa.DynamicFactor:
+def run_model(df: pd.DataFrame, state: str, outdir: Path):
     """Run DFM for a given state
 
     Args:
@@ -84,99 +74,70 @@ def run_model(df: pd.DataFrame, state: str, outdir: Path) -> sm.tsa.DynamicFacto
         state (str): Two-letter state code to process
         outdir (str): Output directory for model CSV files
 
-    Returns:
-        sm.tsa.DynamicFactor: Dynamic Factor Model
+    Returns:
+        tuple: (model, results), or (None, None) if the model fails to converge
     """
-    # Factors
-    factors = get_factors()
-    factor_multiplicities = {"Global": 2}
-    # Run model on a given state and print results
     df = state_process(df, state)
+    save_df(df, outdir, state)
     # Remove factors without an associated column
+    factors = FACTORS.copy()
     factor_keys = list(factors.keys())
     [factors.pop(var) for var in factor_keys if var not in df.columns]
-    outdir.mkdir(exist_ok=True)
-    out = outdir / state
-    pprint(f"Saving state input information to {out}")
-    out.mkdir(exist_ok=True)
-    df.to_excel(out / "df.xlsx")
-    df.to_csv(out / "df.tsv", sep="\t")
-    if (out / "model.csv").exists():
-        return
+    # Load cached model if it exists
+    if (outdir / state / "model.csv").exists():
+        model = sm.load(outdir / state / "model.csv")
+        return model, model.fit(disp=10)
+    # Try to run the model; if it fails, record the failure and return
     try:
-        model = sm.tsa.DynamicFactorMQ(df, factors=factors, factor_multiplicities=factor_multiplicities)
-        pprint(model.summary())
+        factor_multiplicities = {"Global": 2}
+        model = sm.tsa.DynamicFactorMQ(df, factors=factors, factor_multiplicities=factor_multiplicities)
         results = model.fit(disp=10)
     except Exception as e:
         with open(outdir / "failed_convergence.txt", "a") as f:
             f.write(f"{state}\t{e}\n")
-        return
-    pprint(results.summary())
-    # Output
-    pprint(f"Saving output to {outdir}")
-    with open(out / "model.csv", "w") as f:
-        f.write(model.summary().as_csv())
-    with open(out / "results.csv", "w") as f:
-        f.write(results.summary().as_csv())
+        return None, None
+    # Save output
+    save_results(df, model, results, outdir=outdir / state, verbose=True)
     return model, results
 
 
-def test_model(df: pd.DataFrame, state: str, outdir: Path) -> sm.tsa.DynamicFactor:
-    """Run DFM for a given state
+def save_df(df: pd.DataFrame, outdir: Path, state: str):
+    """Save DataFrame as CSV / Excel
 
     Args:
-        df (pd.DataFrame): DataFrame processed via `covid19_drdfm.run`
-        state (str): Two-letter state code to process
-        outdir (str): Output directory for model CSV files
+        df (pd.DataFrame): Input DataFrame to model
+        outdir (Path): Output directory
+        state (str): State to subset by
+    """
+    outdir.mkdir(exist_ok=True)
+    state_dir = outdir / state
+    pprint(f"Saving state input information to {state_dir}")
+    state_dir.mkdir(exist_ok=True)
+    df.to_excel(state_dir / "df.xlsx")
+    df.to_csv(state_dir / "df.tsv", sep="\t")
 
-    Returns:
-        sm.tsa.DynamicFactor: Dynamic Factor Model
+
+def save_results(df: pd.DataFrame, model, results, outdir: Path, verbose: bool = False):
+    """Save model and results to given directory
+
+    Args:
+        df (pd.DataFrame): Input DataFrame used to fit the model
+        model: DynamicFactorMQ model instance
+        results: Results object returned by `model.fit`
+        outdir (Path): Output directory for the summary CSVs
+        verbose (bool, optional): Print model and results summaries. Defaults to False.
     """
-    # Factors
-    factors = get_factors()
-    # factors =
-    #     x[:-1]: y for x, y in factors.items()
-    # }  # TODO: Fix in config to remove this now that multindex is removed
-    factor_multiplicities = {"Global": 2}
-    # Run model on a given state and print results
-    df = state_process(df, state)
-    drop_vars = ["proportion_vax2", "Proportion"]
-    new = df.drop(columns=drop_vars)
-    # [factors.pop(var) for var in drop_vars]
-    #! COLUMN REMOVAL
-    outdir.mkdir(exist_ok=True)
-    out = outdir / state
-    pprint(f"Saving state input information to {out}")
-    out.mkdir(exist_ok=True)
-    new.to_excel(out / "df.xlsx")
-    new.to_csv(out / "df.tsv", sep="\t")
-    if (out / "model.csv").exists():
-        return
-    try:
-        model = sm.tsa.DynamicFactorMQ(new, factors=factors, factor_multiplicities=factor_multiplicities)
+    if verbose:
         pprint(model.summary())
-        results = model.fit(disp=10)
-    except Exception as e:
-        with open(outdir / "failed.txt", "a") as f:
-            f.write(f"{state}\t{e}\n")
-        return
-    pprint(results.summary())
-    # Output
-    pprint(f"Saving output to {outdir}")
-    with open(out / "model.csv", "w") as f:
+        pprint(results.summary())
+    # Output
+    pprint(f"Saving output to {outdir}")
+    with open(outdir / "model.csv", "w") as f:
         f.write(model.summary().as_csv())
-    with open(out / "results.csv", "w") as f:
+    with open(outdir / "results.csv", "w") as f:
         f.write(results.summary().as_csv())
-    return model, results
-
-
-# #%%
-# from covid19_drdfm.processing import get_df
-
-
-# df = get_df()
-# model, results = run_model(df, 'AL', outdir=Path('./test-delete-NY-foo'))
-
-# # %%
+    non_stationary_cols = get_nonstationary_columns(df)
+    if non_stationary_cols:
+        with open(outdir / "non-stationary-columns.txt", "w") as f:
+            f.write("\n".join(non_stationary_cols))
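A minimal driver for the reworked `run_model`, echoing the commented-out example that this patch removes (the output directory name here is arbitrary):

from pathlib import Path

from covid19_drdfm.dfm import run_model
from covid19_drdfm.processing import get_df

df = get_df()  # merged, renamed, differenced, and normalized input
model, results = run_model(df, "AL", outdir=Path("./output"))  # (None, None) if the fit fails to converge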
diff --git a/covid19_drdfm/processing.py b/covid19_drdfm/processing.py
index bfadd4f..8f1a22c 100644
--- a/covid19_drdfm/processing.py
+++ b/covid19_drdfm/processing.py
@@ -6,14 +6,18 @@ This model input DataFrame can be generated with a single function:
 - `df = run()`
 """
-import json
+
 from fractions import Fraction
 from functools import reduce
 from pathlib import Path
 
 import fastparquet
+import numpy as np
 import pandas as pd
 import yaml
+from sklearn.preprocessing import MinMaxScaler
+
+from covid19_drdfm.constants import DIFF_COLS, LOG_DIFF_COLS, NAME_MAP
 
 ROOT_DIR = Path(__file__).parent.absolute()
 DATA_DIR = ROOT_DIR / "data/processed"
@@ -36,25 +40,19 @@ def get_df() -> pd.DataFrame:
         .drop(
             columns=["Proportion", "proportion_vax2", "Pandemic_Response_8"]
         )  #! Columns removed per discussion with AC
-        # .assign(Pandemic_Response_4=lambda x: x[['Pandemic_Response_4', 'Pandemic_Response_5', 'Pandemic_Response_6', 'Pandemic_Response_7']].max(axis=1))
-        # .assign(Pandemic_Response_10=lambda x: x[['Pandemic_Response_10', 'Pandemic_Response_11']].max(axis=1))
-        # .drop(columns=['Pandemic_Response_5','Pandemic_Response_6', 'Pandemic_Response_7', 'Pandemic_Response_11'])
         .pipe(adjust_inflation)
         .pipe(add_datetime)
+        .pipe(fix_names)
+        .pipe(adjust_pandemic_response)
+        .pipe(diff_vars, cols=DIFF_COLS)
+        .pipe(diff_vars, cols=LOG_DIFF_COLS, log=True)
+        .fillna(0)
+        .pipe(normalize)
+        .drop(index=0)  # Drop first row with NaNs from diff
     )
 
 
-def get_factors() -> dict[str, (str, str)]:
-    """Fetch pre-defined factors for model
-
-    Returns:
-        dict[str, (str, str)]: Factors from `./data/processed/factors.yaml`
-    """
-    with open(DATA_DIR / "factors.json") as f:
-        return json.load(f)
-
-
-def write(df: pd.DataFrame, outpath: Path) -> Path:
+def write(df: pd.DataFrame, outpath: Path):
     """Write dataframe given the extension"""
     ext = outpath.suffix
     if ext == ".xlsx":
@@ -107,7 +105,7 @@ def adjust_pandemic_response(df: pd.DataFrame) -> pd.DataFrame:
         pd.DataFrame: Adjusted DataFrame
     """
     govt_fund_dist = get_govt_fund_dist()
-    responses = [f"Pandemic_Response_{x}" for x in [13, 14, 15]]
+    responses = ["ARP", "PPP", "CARES"]
     for r in responses:
         df[r] = df[r].astype(float)
         i = df.index[df[r] > 0][0]
@@ -118,7 +116,7 @@ def add_datetime(df: pd.DataFrame) -> pd.DataFrame:
-    """Sets `Time` column to `DateTime` dtype
+    """Set `Time` column to `DateTime` dtype
 
     Args:
         df (pd.DataFrame): Input DataFrame
@@ -129,3 +127,55 @@
     df = df.assign(Month=pd.to_numeric(df.Period.apply(lambda x: x[1:]))).assign(Day=1)
     df["Time"] = pd.to_datetime({"year": df.Year, "month": df.Month, "day": df.Day})
     return df.drop(columns=["Period", "Month", "Year", "Day"])
+
+
+def fix_names(df: pd.DataFrame) -> pd.DataFrame:
+    """Map sensible names to the merged input dataframe
+
+    Args:
+        df (pd.DataFrame): Input DataFrame after merging all input data
+
+    Returns:
+        pd.DataFrame: DataFrame with mapped names
+    """
+    return df.rename(columns=NAME_MAP)
+
+
+def diff_vars(df: pd.DataFrame, cols: list[str], log: bool = False) -> pd.DataFrame:
+    """Difference the given set of variables within the DataFrame
+    NOTE: leaves a row of NaNs
+
+    Args:
+        df (pd.DataFrame): Input DataFrame
+        cols (list[str]): List of columns to difference
+        log (bool): Whether to log-transform before differencing
+
+    Returns:
+        pd.DataFrame: DataFrame with given vars differenced
+    """
+    if log:
+        df[cols] = df[cols].apply(lambda x: np.log(x + 1)).diff()
+    else:
+        df[cols] = df[cols].diff()
+    return df
+
+
+def normalize(df: pd.DataFrame) -> pd.DataFrame:
+    """Normalize data - scaling for post-DFM Synthetic Control Model
+
+    Args:
+        df (pd.DataFrame): State data, pre-normalization
+
+    Returns:
+        pd.DataFrame: Normalized DataFrame
+    """
+    meta_cols = df[["State", "Time"]]
+    df = df.drop(columns=["State", "Time"])
+    # Normalize data
+    scaler = MinMaxScaler()
+    new = pd.DataFrame(scaler.fit_transform(df), columns=df.columns)
+    new["State"] = meta_cols["State"]
+    return new
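To make the two transforms in `diff_vars` concrete, here is a toy frame with made-up values (illustrative only):

import pandas as pd

from covid19_drdfm.processing import diff_vars

toy = pd.DataFrame({"Cases1": [10, 15, 30], "GDP": [100.0, 110.0, 121.0]})
diffed = diff_vars(toy.copy(), cols=["Cases1"])  # 10, 15, 30 -> NaN, 5, 15
log_diffed = diff_vars(toy.copy(), cols=["GDP"], log=True)  # log(x + 1) first, then first difference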
diff --git a/covid19_drdfm/scm.py b/covid19_drdfm/scm.py
new file mode 100644
index 0000000..d73d4fe
--- /dev/null
+++ b/covid19_drdfm/scm.py
@@ -0,0 +1,26 @@
+import pandas as pd
+from SyntheticControlMethods import DiffSynth, Synth
+
+# Import data
+data_dir = "https://raw.githubusercontent.com/OscarEngelbrektson/SyntheticControlMethods/master/examples/datasets/"
+df = pd.read_csv(data_dir + "smoking_data" + ".csv")
+
+
+# Fit Differenced Synthetic Control
+df = df.rename(columns={"cigsale": "Pandemic"})
+sc = Synth(df, "Pandemic", "state", "year", 1989, "California", n_optim=10, pen="auto")
+
+print(sc.original_data.weight_df)
+print(sc.original_data.comparison_df)
+print(sc.original_data.pen)
+
+# Visualize
+sc.plot(
+    ["original", "pointwise", "cumulative"],
+    treated_label="California",
+    synth_label="Synthetic California",
+    treatment_label="Pandemic Response",
+)
diff --git a/covid19_drdfm/streamlit/runner.py b/covid19_drdfm/streamlit/runner.py
index e8a7634..b424f04 100644
--- a/covid19_drdfm/streamlit/runner.py
+++ b/covid19_drdfm/streamlit/runner.py
@@ -12,6 +12,7 @@
 from covid19_drdfm.dfm import state_process
-from covid19_drdfm.processing import get_df, get_factors
+from covid19_drdfm.processing import get_df
+from covid19_drdfm.constants import NAME_MAP
 
 st.set_page_config(layout="wide")
 pio.templates.default = "plotly_white"
@@ -31,6 +32,8 @@
     "Inflation": ["Monetary_2", "Monetary_3", "Monetary_1"],
     "Pandemic": ["Pandemic_1", "Pandemic_2", "Pandemic_6", "Pandemic_9", "Pandemic_7", "Pandemic_10"],
 }
+DEFAULTS = {x: [NAME_MAP[z] for z in y] for x, y in DEFAULTS.items()}
 
 
 def center_title(text):
diff --git a/poetry.lock b/poetry.lock
index 56f83f8..2065adc 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -256,6 +256,27 @@ files = [
     {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
 ]
 
+[[package]]
+name = "clarabel"
+version = "0.6.0"
+description = "Clarabel Conic Interior Point Solver for Rust / Python"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "clarabel-0.6.0-cp37-abi3-macosx_10_7_x86_64.whl", hash = "sha256:4f366de79b8bc66bef8dc170987840b672ccab9222e710c09536d78ef47f606d"},
+    {file = "clarabel-0.6.0-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:edcebbfc14073cd32bfb664317fd2555716c96be8b2a54efdb2b728453582bea"},
+    {file = "clarabel-0.6.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e737d2818b9ca10e92ccd3fa9ad1a805b039976016415a0c45adef3427d70792"},
+    {file = "clarabel-0.6.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e0b1891d8e507eb0bfc7e0b981584c388b2ab28658056e600997dbbc23f1ab4"},
+    {file = "clarabel-0.6.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9946d3b5db346421b6d839d868e7b1151b590f871344fe95113bfd55b5be2433"},
+    {file = "clarabel-0.6.0-cp37-abi3-win32.whl", hash = "sha256:73ed408c975a8ea021c3d8262d5d023a18e1ac3f6bb59a37cd69a11dba8f86ed"},
+    {file = "clarabel-0.6.0-cp37-abi3-win_amd64.whl", hash = "sha256:5a6be4df9fed98b6f73f034836def913a1ecd52e8b79ca230ddf7cd66ebcdee7"},
+    {file = "clarabel-0.6.0.tar.gz", hash = "sha256:ef909a393e72981ca10b1d866d9cc7fb6295ece20ae035def764338894961184"},
+]
+
+[package.dependencies]
+numpy = "*"
+scipy = "*"
+
 [[package]]
 name = "click"
 version = "8.1.7"
@@ -482,6 +503,64 @@ files = [
 
 [package.extras]
 dev = ["black (==22.3.0)", "hypothesis", "numpy", "pytest (>=5.30)", "pytest-xdist"]
 
+[[package]]
+name = "cvxpy"
+version = "1.4.1"
+description = "A domain-specific language for modeling convex optimization problems in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cvxpy-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:03588055b660c043848f5281fe24dbd21f005b34bd8bd3b56906d8ad457c14ae"}, + {file = "cvxpy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:315609ff96adeda4970471b349bc19d44ff4043e15630cf5ac70c029658fe8fc"}, + {file = "cvxpy-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55e08ffb973d62b3fabc675ad464cb6013ea5ce69799f330b33a084a2e580d8d"}, + {file = "cvxpy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f1482558b785f2db51c76b9c6e91cc85dbd146675b126a799e7d7aab5b15354"}, + {file = "cvxpy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:2f84687d15d11f9b49ca902f20103a2076efd47773c399cace71237ef53cdadc"}, + {file = "cvxpy-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d6bfbd535fdaabc5fa55f28de7a1d40f3a803a27fe3fec86e90700fa159a3afc"}, + {file = "cvxpy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:71a95aaccf22431fd25a63bcb12d583e1b0baeaeb4fafa3e25857cec03b9e2f3"}, + {file = "cvxpy-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d3bae3bf31e4eb6ed6407f78c6bc3c7bc4b4145cdbbb9ba8c61c3fc541d7067"}, + {file = "cvxpy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41cfaecf86f85162ca53c7be7377b4143e316204fb9b6a7df8b7a08c826e3806"}, + {file = "cvxpy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:edf66010e49b64d3f2dd1a7abde8fa3e615ce7a2b3eb185ab744b0beb3a6adb9"}, + {file = "cvxpy-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6b0f17dca85b2a410e73f5d84b28f35f57a20cfec1b0adc9b16f0f8aabff9961"}, + {file = "cvxpy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9318c4e679b3db470e76e7f23cce362b038bd2d68c4a7326a7c21577ddbdc542"}, + {file = "cvxpy-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a46ef722c8d1590875e86360d5781703dfcbd08be73eb98a2fc91a280870064"}, + {file = "cvxpy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57593a852c563ce77bdb075a3e75f23d36d4b3162ebf3199b54cc7fe75088ef2"}, + {file = "cvxpy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:db89b55025514bad821b1f1781bed373cbb6aa22fe84420431efd510dbe7f858"}, + {file = "cvxpy-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:372c0825cc6e6bb03ecc550d83718761a1bbdbbb48010fec6f9718581ebd45b5"}, + {file = "cvxpy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163caffd7f7f27b6cb151f4ccff283068e063c3673158793048761690cbe4bbe"}, + {file = "cvxpy-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f24067c54979b09910aea0a03256247121d8a8169538facf087c1923e9e2701a"}, + {file = "cvxpy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a3ec054279880a9ebf5fd9d2ac4109acf944b8c45ea8b24e461680e34f3d7b5"}, + {file = "cvxpy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d220a7ee55907da9b55b98e5238d03735118d03b82855ba87b872cb2e6977367"}, + {file = "cvxpy-1.4.1.tar.gz", hash = "sha256:7a9ef34e3c57ff8c844d86f0a3834fb5575af19233947639de0ba577c6122e3e"}, +] + +[package.dependencies] +clarabel = ">=0.5.0" +ecos = ">=2" +numpy = ">=1.15" +osqp = ">=0.6.2" +pybind11 = "*" +scipy = ">=1.1.0" +scs = ">=3.0" + +[package.extras] +cbc = ["cylp (>=0.91.5)"] +clarabel = ["clarabel"] +cvxopt = ["cvxopt"] +diffcp = ["diffcp"] 
+glop = ["ortools (>=9.5,<9.8)"] +glpk = ["cvxopt"] +glpk-mi = ["cvxopt"] +gurobi = ["gurobipy"] +highs = ["scipy (>=1.6.1)"] +mosek = ["Mosek"] +pdlp = ["ortools (>=9.5,<9.8)"] +piqp = ["piqp"] +proxqp = ["proxsuite"] +scip = ["PySCIPOpt"] +scipy = ["scipy"] +scs = ["setuptools (>65.5.1)"] +xpress = ["xpress"] + [[package]] name = "cycler" version = "0.12.1" @@ -524,6 +603,35 @@ files = [ {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] +[[package]] +name = "ecos" +version = "2.0.12" +description = "This is the Python package for ECOS: Embedded Cone Solver. See Github page for more information." +optional = false +python-versions = "*" +files = [ + {file = "ecos-2.0.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:835298a299c88c207b3402fba60ad9b5688b59bbbf2ac34a46de5b37165d773a"}, + {file = "ecos-2.0.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:608bc822ee8e070927ab3519169b13a1a0fe88f3d562212d6b5dbb1039776360"}, + {file = "ecos-2.0.12-cp310-cp310-win_amd64.whl", hash = "sha256:5184a9d8521ad1af90ffcd9902a6fa75c7bc473f37d30d86f97beda1033dfca2"}, + {file = "ecos-2.0.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eba07599084724eedc20b2862d5580eebebb09609f4740baadc78401cb99827c"}, + {file = "ecos-2.0.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4979dc2d1cb6667e371a45a61887068505c1305437eef104ed6ef16f4b6aa0e3"}, + {file = "ecos-2.0.12-cp311-cp311-win_amd64.whl", hash = "sha256:da8fbbca3feb83a9e27075d29b3765417d0c80af8ea83cbdc4a558cae7b564af"}, + {file = "ecos-2.0.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f70e4547966f530fd7715756f7a65d5b9b90b312b9d37f243ef9356c05e7d74c"}, + {file = "ecos-2.0.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:617be25d74222849622b0f82b94a11abcf1fae78ccaf69977b328321ee6ffa0b"}, + {file = "ecos-2.0.12-cp37-cp37m-win_amd64.whl", hash = "sha256:29d00164eaea66ed54697a3b361c575284a8bca54f2623381a0635806c7303a7"}, + {file = "ecos-2.0.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4e86671397d1d2cd7cccff8a9c45be0541b0c60af8b92a0ff3581c9ed869db67"}, + {file = "ecos-2.0.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:858a4dd3177bdc8cc6e362031732f5177b62138a1e4ef91c0dc3c6bd7d2d1248"}, + {file = "ecos-2.0.12-cp38-cp38-win_amd64.whl", hash = "sha256:528b02f53835bd1baeb2e23f8153b8d6cc2b3704e1768be6a1a972f542241670"}, + {file = "ecos-2.0.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e42bd4c19af6e04f76ccc85d941b1f1adc7faeee4d06d482395a6beb7bec895"}, + {file = "ecos-2.0.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6def54336a15b5a49bc3bfcaa36035e8557cae8a4853b17ca84f5a29c93bcaea"}, + {file = "ecos-2.0.12-cp39-cp39-win_amd64.whl", hash = "sha256:7af08941552fce108bd80145cdb6be7fa74477a20bacdac170800442cc7027d4"}, + {file = "ecos-2.0.12.tar.gz", hash = "sha256:f48816d73b87ae325556ea537b7c8743187311403c80e3832035224156337c4e"}, +] + +[package.dependencies] +numpy = ">=1.6" +scipy = ">=0.9" + [[package]] name = "et-xmlfile" version = "1.1.0" @@ -1561,6 +1669,45 @@ files = [ [package.dependencies] et-xmlfile = "*" +[[package]] +name = "osqp" +version = "0.6.3" +description = "OSQP: The Operator Splitting QP Solver" +optional = false +python-versions = "*" +files = [ + {file = "osqp-0.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7d923c836f1d07115057e595245ccc1694ecae730a1affda78fc6f3c8d239"}, + {file = 
"osqp-0.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dfda08c38c3521012740a73ef782f97dfc54a41deae4b0bc4afd18d0e74da0"}, + {file = "osqp-0.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7eafa3f3e82dd36c52f3f4ef19a95142405c807c272c4b53c5971c53535d7804"}, + {file = "osqp-0.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:3cbb6efdaffb7387dc0037dfe3259d4803e5ad7217e6f20fb605c92953214b9d"}, + {file = "osqp-0.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1b2049b2c42565dcaa63ddca1c4028b1fb20aab141453f5d77e8ff5b1a99a2cf"}, + {file = "osqp-0.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:146b89f2cfbf59eaeb2c47e3a312f2034138df78d80ce052364810dc0ef70fc4"}, + {file = "osqp-0.6.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0084e3d733c75687d68bc133bc380ce471dfe6f7724af2718a43491782eec8d6"}, + {file = "osqp-0.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:1b573fe1cd0e82239a279c58817c1d365187ef862e928b2b9c828c3c516ad3c2"}, + {file = "osqp-0.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c3951ef505177b858c6cd34de980346014cae3d2234c93db960b12c5885f9a2"}, + {file = "osqp-0.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc18f87c9549032c163ce590a5e32079df94ee656c8fb357ba607aa9d78fab81"}, + {file = "osqp-0.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b1a4b538aab629b0fae69f644b7e76f81f94d65230014d482e296dacd046b"}, + {file = "osqp-0.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:60abec3593870990b16f00bd5017096a7091fb00b68d0db3383fc048ca8e55c9"}, + {file = "osqp-0.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b73bdd9589901841af83c5ed6a4092b4fac5a0beff9e32682d8526d1f16a728c"}, + {file = "osqp-0.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d9f611823af4a8b241c86805920e5382cd65c7f94fd3615b4eef999ed94c7c"}, + {file = "osqp-0.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fbc3b3c028c06a6c5f1e66be7b7106ad48a29e0dc5bd82393f82dd68235ef8"}, + {file = "osqp-0.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fe57e4bde071b388518ecb068f26319506dd9cb107363d3d80c12d2e59fc1e81"}, + {file = "osqp-0.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41f304d1d7f91af07d8f0b01e5af29ec3bb8824f0102c7fd8b13b497be120da4"}, + {file = "osqp-0.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea7d8c92bcdf4fef98d777f13d39060d425ef2e8778ed487c96a6fa10848cdea"}, + {file = "osqp-0.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f3a3c6d2708868e5e3fe2da300d6523cbf68a3d8734ce9c5043db37391969f5"}, + {file = "osqp-0.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:1c548a0b3691850e7e22f3624a128d8af33416d70a9b5976a47d4d832028dcd8"}, + {file = "osqp-0.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387e7abd737dfe32c9ec00ad74af25328cdd0d0f634d79530655c040a5cb9590"}, + {file = "osqp-0.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1445e10a94e01698e13c87a7debf6ac1a15f3acd1f8f6340cb1ad945db4732b"}, + {file = "osqp-0.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0441c10f7fe5f46692a9b44a57138977bb112ae3f8127151671968c5d9ec5dbb"}, + {file = "osqp-0.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:b15e65a307fbbabf60248bb9bc204e61d5d4ae64e00427a69e2dad9622f4c29d"}, + {file = "osqp-0.6.3.tar.gz", hash = "sha256:03e460e683ec2ce0f839353ddfa3c4c8ffa509ab8cf6a2b2afbb586fa453e180"}, +] + +[package.dependencies] +numpy = ">=1.7" +qdldl = "*" +scipy = ">=0.13.2" + [[package]] name = "packaging" version = "23.1" @@ -1907,6 +2054,20 @@ files = [ [package.dependencies] numpy = ">=1.16.6" +[[package]] +name = "pybind11" +version = "2.11.1" +description = "Seamless operability between C++11 and Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pybind11-2.11.1-py3-none-any.whl", hash = "sha256:33cdd02a6453380dd71cc70357ce388ad1ee8d32bd0e38fc22b273d050aa29b3"}, + {file = "pybind11-2.11.1.tar.gz", hash = "sha256:00cd59116a6e8155aecd9174f37ba299d1d397ed4a6b86ac1dfe01b3e40f2cc4"}, +] + +[package.extras] +global = ["pybind11-global (==2.11.1)"] + [[package]] name = "pycparser" version = "2.21" @@ -2249,6 +2410,38 @@ files = [ [package.dependencies] cffi = {version = "*", markers = "implementation_name == \"pypy\""} +[[package]] +name = "qdldl" +version = "0.1.7.post0" +description = "QDLDL, a free LDL factorization routine." +optional = false +python-versions = "*" +files = [ + {file = "qdldl-0.1.7.post0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8ab02e8b9ff86bd644a1935718387c82fbe04c31e3309cf9f7a121d02b1deda8"}, + {file = "qdldl-0.1.7.post0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e5d6753310377451ed4dc09b1ef28faf40108b713e7f55c8a8ae94d679a672"}, + {file = "qdldl-0.1.7.post0-cp310-cp310-win_amd64.whl", hash = "sha256:718d8e141832e96ba71ca1807a74813836c6403110faaa3d33a67de1af3b29c4"}, + {file = "qdldl-0.1.7.post0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e3f06e8a49ddd834b24fc3d7afbba4fec0923101045aa2666e18d2a9980e329"}, + {file = "qdldl-0.1.7.post0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a81c46522dd6b3042e2348fa98128bb5c0e466f42bce214e80cfb766ff40930"}, + {file = "qdldl-0.1.7.post0-cp311-cp311-win_amd64.whl", hash = "sha256:4a86155f3de66c5db0e21544b7a2421c671028fa20da407686d2a8d0e9b57e51"}, + {file = "qdldl-0.1.7.post0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:717cb1892b033c01a0aae84ededcfa1f05bcb97013095d779c497e6c32f90dac"}, + {file = "qdldl-0.1.7.post0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fc35432913085d94b2327242cf51388467ef7a37ac0d71eb31b594b575dd498"}, + {file = "qdldl-0.1.7.post0-cp36-cp36m-win_amd64.whl", hash = "sha256:fd5cfd8c50f33ddacb830594a63b8c1093a24aea45312b9d2ed826cea5ece08a"}, + {file = "qdldl-0.1.7.post0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:981ca8672e9506976c663552c1eb6f6daf9726d62650b3bf5900260946156166"}, + {file = "qdldl-0.1.7.post0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ec670d97cf756f9159dc0a11de5cf054e88aefe84bea1c7282f00334642843"}, + {file = "qdldl-0.1.7.post0-cp37-cp37m-win_amd64.whl", hash = "sha256:aa208703b44337a7e77f6f2663f7a452144becb4421970d534ff8297b92e1e10"}, + {file = "qdldl-0.1.7.post0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b42649484f7c0d8ee659224ecaac0a3e97f12531018207f4d7323e4071320eb1"}, + {file = "qdldl-0.1.7.post0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26aa3d6f0da7779265d72e8f418094003e75fa53c515a53bc03fd8b9bcfbf7de"}, + {file = 
"qdldl-0.1.7.post0-cp38-cp38-win_amd64.whl", hash = "sha256:e55bcd6962178029faf543addd49db145302dd51e19855fefa71b5fd55840eea"}, + {file = "qdldl-0.1.7.post0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1dd0e570e65aaf35e10b7fb345f7ac763fd05a2227b9c06ce65e07993fc4984"}, + {file = "qdldl-0.1.7.post0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae161342529852b6248ace4642bc4ee371a7c1e0707b7bc43a43ef7e73c06ca3"}, + {file = "qdldl-0.1.7.post0-cp39-cp39-win_amd64.whl", hash = "sha256:092f6606690a2b9bd3c939f3147887e02de13bb068fbed5ffdc7459034def623"}, + {file = "qdldl-0.1.7.post0.tar.gz", hash = "sha256:f346a114c8342ee6d4dbd6471eef314199fb268d3bf7b95885ca351fde2b023f"}, +] + +[package.dependencies] +numpy = ">=1.7" +scipy = ">=0.13.2" + [[package]] name = "referencing" version = "0.30.2" @@ -2597,6 +2790,39 @@ dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyl doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "scs" +version = "3.2.4.post1" +description = "Splitting conic solver" +optional = false +python-versions = ">=3.7" +files = [ + {file = "scs-3.2.4.post1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51fed30d2a4a1e6fbfc1e52b4cb3adeecbe89d7c47f3539b49afbb852415fe19"}, + {file = "scs-3.2.4.post1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb0524c0b9c3ed0d65dae161475accf3efa8e170938eb93251a60e9709b156ee"}, + {file = "scs-3.2.4.post1-cp310-cp310-win_amd64.whl", hash = "sha256:534519819eea96f18902a9fce15c4ec562b99d23b38dc843a48cb137b5641613"}, + {file = "scs-3.2.4.post1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d04ee4d19ac6d0f5053663bc48fcd5c5faed534272f13b10a4e173c814eea69"}, + {file = "scs-3.2.4.post1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c23b4299ab77ff5f654573d5667dc982292a8ef2b979053b38c40663919f13"}, + {file = "scs-3.2.4.post1-cp311-cp311-win_amd64.whl", hash = "sha256:ae4624938d3e3a8b7e508029275c6ad7a978fd48c158d0818f69f4ae764bf945"}, + {file = "scs-3.2.4.post1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:40294e22bfe509bdf7fd65a6b77c38cec22dcb3567ff5a75f3c41a1faf2ef1d5"}, + {file = "scs-3.2.4.post1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a2337acb0604770b6df1473254065a51c210ff9c82fc7c4490595510287a337"}, + {file = "scs-3.2.4.post1-cp312-cp312-win_amd64.whl", hash = "sha256:8689e75a57e59846e65d1c4b9d57e9964b00fcbb8e67fc77f98cf6e0a0530abd"}, + {file = "scs-3.2.4.post1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ad991b00d0a87c85db57bf2f1863c21bdc4e2f13837f6c35e809f5936bc6f165"}, + {file = "scs-3.2.4.post1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a28af160a44268e726a59d6cf340629b82940c1a643c4c87fe777e9cbe550d75"}, + {file = "scs-3.2.4.post1-cp37-cp37m-win_amd64.whl", hash = "sha256:f6283f725f3fee63d4631c2532d01a5b2ea65883b04d3da3be06084b1c60171b"}, + {file = "scs-3.2.4.post1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b3a622cf2120ae765f0f3ad5c6f4f86796d317e29132bab2ad4af3c14d9bf4d"}, + {file = "scs-3.2.4.post1-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:4b5259137c263304effa2b28d0125437ac23569e6e7753c115ae1206ec5033fd"}, + {file = 
"scs-3.2.4.post1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:424710bc19b0506feee7e05e6d2b7af98acf09af5bd5353126164cbd46ac923f"}, + {file = "scs-3.2.4.post1-cp38-cp38-win_amd64.whl", hash = "sha256:e21bdc8046648846e2c204a6c5cf24eaaedd2b8f5e0a2ab41a647b0247b8d592"}, + {file = "scs-3.2.4.post1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cea0f7e9473f43f7edf1641d020ead7e39653a81c540fbdba8f3b7b8480038c9"}, + {file = "scs-3.2.4.post1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6126f1d7ed5ff368cb8c1836715b17a50074314579eefc6d511995a3ab93d70"}, + {file = "scs-3.2.4.post1-cp39-cp39-win_amd64.whl", hash = "sha256:18788befa5284bb1f49149bac7f813703de60ef5b6bf7698a9f1c3a5a49b78e4"}, + {file = "scs-3.2.4.post1.tar.gz", hash = "sha256:7015d7a56d1d5b53264fd277289ea169949309e26101677ff88cd0e5030d032f"}, +] + +[package.dependencies] +numpy = "*" +scipy = "*" + [[package]] name = "seaborn" version = "0.13.0" @@ -2740,6 +2966,24 @@ watchdog = {version = ">=2.1.5", markers = "platform_system != \"Darwin\""} [package.extras] snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python (>=0.9.0)"] +[[package]] +name = "syntheticcontrolmethods" +version = "1.1.17" +description = "A Python package for causal inference using various Synthetic Control Methods" +optional = false +python-versions = "*" +files = [ + {file = "SyntheticControlMethods-1.1.17.tar.gz", hash = "sha256:941c77427ef1a87d5cf5863047c5d0c77c280a6b3aea04de3302e56fa76fe505"}, +] + +[package.dependencies] +cvxpy = ">=1.1.7" +jinja2 = ">=2.10" +matplotlib = ">=2.2.3" +numpy = ">=1.17" +pandas = ">=1.1.2" +scipy = ">=1.4.1" + [[package]] name = "tenacity" version = "8.2.3" @@ -3034,4 +3278,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = ">=3.9,<3.9.7 || >3.9.7,<3.13" -content-hash = "810bd9867f57e51b26369ed4149bd8d7497693e1b02b1cef1bdc20b02aa63869" +content-hash = "afc8b3ec58977cc258171ff816043f526daf261c308246efc484b33d87e196b9" diff --git a/pyproject.toml b/pyproject.toml index e15a59d..177ca8a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ streamlit = "^1.28.1" nbclient = "^0.9.0" matplotlib = "^3.8.1" seaborn = "^0.13.0" +syntheticcontrolmethods = "^1.1.17" [tool.poetry.group.dev.dependencies] pytest = "^7.2.0" diff --git a/tests/test_processing.py b/tests/test_processing.py index 771076b..cbed7ce 100644 --- a/tests/test_processing.py +++ b/tests/test_processing.py @@ -3,6 +3,7 @@ import pandas as pd import pytest +from covid19_drdfm.constants import NAME_MAP from covid19_drdfm.processing import ( DATA_DIR, ROOT_DIR, @@ -34,8 +35,8 @@ def test_get_govt_fund_dist(): assert int(sum(govt_fund) + 0.00001) == 1, "Distribution must sum to 1" -def test_adjust_inflation(sample_data): - input_df = sample_data.copy() +def test_adjust_inflation(raw_data): + input_df = raw_data.copy() output_df = adjust_inflation(input_df) assert input_df.Demand_1.iloc[0] < output_df.Demand_1.iloc[0] @@ -45,9 +46,9 @@ def test_adjust_pandemic_response(sample_data): #! 
Note - this is testing functionality, but is used per-state not on whole df out = adjust_pandemic_response(input_df) df = get_df() - responses = [f"Pandemic_Response_{x}" for x in [13, 14, 15]] + responses = [NAME_MAP[f"Pandemic_Response_{x}"] for x in [13, 14, 15]] for r in responses: - assert df[r].sum() == out[r].sum() + assert int(df[r].sum()) == int(out[r].sum()) def test_fix_datetime(raw_data): @@ -58,7 +59,7 @@ def test_fix_datetime(raw_data): def test_run(): df = get_df() - expected_columns = ["State", "Supply_1", "Demand_1", "Pandemic_Response_13", "Time"] + expected_columns = ["State", "GDP", "Deaths1"] assert all(col in df.columns for col in expected_columns)
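The new `syntheticcontrolmethods` dependency and `scm.py` point toward the eventual synthetic-control step; a hypothetical wiring against state-level data might look like the following (the file path, column names, treated unit, and treatment period are placeholders, not the project's actual schema):

import pandas as pd
from SyntheticControlMethods import Synth

panel = pd.read_csv("state_panel.csv")  # hypothetical long-format panel: one row per (State, Time)
sc = Synth(panel, "Pandemic", "State", "Time", 2020, "NY", n_optim=10, pen="auto")
sc.plot(["original", "pointwise", "cumulative"], treated_label="NY", synth_label="Synthetic NY")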