From 0fc5c31c6ce7ec4e773d3d4b3141104c468c5f32 Mon Sep 17 00:00:00 2001 From: Darkshades Date: Sat, 22 Jun 2024 20:39:05 +0100 Subject: [PATCH] fixed issue with python 3.11 --- .../discriminant_analysis_classifier.py | 9 +- tune_classifier/ensemble_classifier.py | 87 ++++---- tune_classifier/linear_model_classifier.py | 65 +++--- tune_classifier/mlp_classifier.py | 37 ++-- tune_classifier/naive_bayes_classifier.py | 17 +- tune_classifier/neighbor_classifier.py | 19 +- tune_classifier/svc.py | 36 ++-- tune_classifier/tree_classifier.py | 21 +- tune_regressor/ensemble_regressor.py | 83 ++++---- tune_regressor/linear_model_regressor.py | 191 +++++++++--------- tune_regressor/mlp_regressor.py | 2 +- tune_regressor/neighbor_regressor.py | 11 +- tune_regressor/svr.py | 37 ++-- tune_regressor/tree_regressor.py | 21 +- 14 files changed, 311 insertions(+), 325 deletions(-) diff --git a/tune_classifier/discriminant_analysis_classifier.py b/tune_classifier/discriminant_analysis_classifier.py index 406f300..4164edf 100644 --- a/tune_classifier/discriminant_analysis_classifier.py +++ b/tune_classifier/discriminant_analysis_classifier.py @@ -1,7 +1,6 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from types import MappingProxyType +from dataclasses import dataclass, field from typing import Iterable, Optional, Dict, Any, Callable from sklearn.discriminant_analysis import LinearDiscriminantAnalysis, QuadraticDiscriminantAnalysis @@ -10,7 +9,7 @@ class LDAClassifierTuner(BaseTuner): solver_space: Iterable[str] = ("svd", "lsqr", "eigen") shrinkage_space: Iterable[str] = (None, "auto") - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) priors_space: Iterable[Optional[Iterable[float]]] = (None, ) store_covariance: Iterable[bool] = (False, ) covariance_estimator_space: Iterable[Optional[object]] = (None, ) @@ -44,8 +43,8 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any: @dataclass class QDAClassifierTuner(BaseTuner): - reg_param_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) + reg_param_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) priors_space: Iterable[Optional[Iterable[float]]] = (None,) store_covariance: Iterable[bool] = (False,) diff --git a/tune_classifier/ensemble_classifier.py b/tune_classifier/ensemble_classifier.py index 3da9910..423c28a 100644 --- a/tune_classifier/ensemble_classifier.py +++ b/tune_classifier/ensemble_classifier.py @@ -1,8 +1,7 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Iterable, Optional, Dict, Any, Union, Callable -from types import MappingProxyType from sklearn.ensemble import ( RandomForestClassifier, ExtraTreesClassifier, @@ -14,25 +13,25 @@ @dataclass class RandomForestClassifierTuner(BaseTuner): - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":200, "step":1, "log":True}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":200, "step":1, "log":True}) criterion_space: 
Iterable[str] = ("gini", "entropy", "log_loss") set_max_depth_space: Iterable[bool] = (True, False) - max_depth_space: Dict[str, Any] = MappingProxyType({"low":10, "high":2000, "step":1, "log":True}) - min_samples_split_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_weight_fraction_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) + max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":2000, "step":1, "log":True}) + min_samples_split_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_samples_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_weight_fraction_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False}) max_features_space: Iterable[str] = ("sqrt", "log2", None) set_max_leaf_nodes_space: Iterable[bool] = (True, False) - max_leaf_nodes_space: Dict[str, Any] = MappingProxyType({"low":2, "high":10000, "step":1, "log":True}) - min_impurity_decrease_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + max_leaf_nodes_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":10000, "step":1, "log":True}) + min_impurity_decrease_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) bootstrap_space: Iterable[bool] = (True, False) oob_score_space: Iterable[bool] = (True, False) class_weight_space: Iterable[str] = ("balanced", "balanced_subsample") set_random_state_space: Iterable[bool] = (False, ) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) - ccp_alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) + ccp_alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) set_max_samples_space: Iterable[bool] = (True, False) - max_samples_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) + max_samples_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -117,10 +116,10 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class AdaBoostClassifierTuner(BaseTuner): estimator_space: Iterable[Optional[object]] = (None, ) - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":200, "step":1, "log":True}) - learning_rate_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1, "step":None, "log":True}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":200, "step":1, "log":True}) + learning_rate_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1, "step":None, "log":True}) algorithm_space: Iterable[str] = ("SAMME", "SAMME.R") - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, 
"log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -146,26 +145,26 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class GradientBoostingClassifierTuner(BaseTuner): loss_space: Iterable[str] = ("log_loss", ) - learning_rate_space: Dict[str, Any] = MappingProxyType({"low":0.001, "high":1.0, "step":None, "log":True}) - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) - subsample_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) + learning_rate_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.001, "high":1.0, "step":None, "log":True}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) + subsample_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) criterion_space: Iterable[str] = ("friedman_mse", "squared_error") - min_samples_split_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_weight_fraction_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) + min_samples_split_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_samples_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_weight_fraction_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False}) set_max_depth_space: Iterable[bool] = (True, False) - max_depth_space: Dict[str, Any] = MappingProxyType({"low":10, "high":2000, "step":1, "log":True}) - min_impurity_decrease_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":2000, "step":1, "log":True}) + min_impurity_decrease_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) init_space: Iterable[Optional[object]] = (None, ) max_features_space: Iterable[str] = ("sqrt", "log2") set_max_leaf_nodes_space: Iterable[bool] = (True, False) - max_leaf_nodes_space: Iterable[Optional[int]] = MappingProxyType({"low":2, "high":10000, "step":1, "log":True}) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) + max_leaf_nodes_space: Iterable[Optional[int]] = field(default_factory=lambda: {"low":2, "high":10000, "step":1, "log":True}) + validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) set_n_iter_no_change_space:Iterable[bool] = (True, False) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) - ccp_alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) + random_state_space: Dict[str, Any] = 
field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) + ccp_alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -231,13 +230,13 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class BaggingClassifierTuner(BaseTuner): estimator_space: Iterable[Optional[object]] = (None, ) - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) - max_samples_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - max_features_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) + max_samples_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + max_features_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) bootstrap_space: Iterable[bool] = (True, False) bootstrap_features_space: Iterable[bool] = (True, False) oob_score_space: Iterable[bool] = (True, False) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -275,24 +274,24 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class HistGradientBoostingClassifierTuner(BaseTuner): loss_space: Iterable[str] = ("log_loss", ) - learning_rate_space: Dict[str, Any] = MappingProxyType({"low":0.001, "high":1.0, "step":None, "log":True}) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":10, "high":1000, "step":1, "log":True}) + learning_rate_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.001, "high":1.0, "step":None, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":1000, "step":1, "log":True}) set_max_leaf_nodes_space: Iterable[bool] = (True, False) - max_leaf_nodes_space: Iterable[Optional[int]] = MappingProxyType({"low":2, "high":10000, "step":1, "log":True}) + max_leaf_nodes_space: Iterable[Optional[int]] = field(default_factory=lambda: {"low":2, "high":10000, "step":1, "log":True}) set_max_depth_space: Iterable[bool] = (True, False) - max_depth_space: Dict[str, Any] = MappingProxyType({"low":10, "high":2000, "step":1, "log":True}) - min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":1, "high":200, "step":1, "log":True}) - l2_regularization_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) - max_bins_space: Dict[str, Any] = MappingProxyType({"low":10, "high":255, "step":1, "log":True}) + max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":2000, "step":1, "log":True}) + min_samples_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":200, "step":1, "log":True}) + l2_regularization_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) + max_bins_space: Dict[str, Any] = field(default_factory=lambda: 
{"low":10, "high":255, "step":1, "log":True}) categorical_features_space: Iterable[Any] = (None, ) monotonic_cst_space: Iterable[Any] = (None, ) interaction_cst_space: Iterable[Any] = (None, ) early_stopping_space: Iterable[bool] = ("auto", True, False) scoring_space: Iterable[Optional[Union[str, Callable]]] = ("loss", None) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":True}) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":True}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) class_weight_space: Iterable[str] = ("balanced", ) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: diff --git a/tune_classifier/linear_model_classifier.py b/tune_classifier/linear_model_classifier.py index e67872e..7843a57 100644 --- a/tune_classifier/linear_model_classifier.py +++ b/tune_classifier/linear_model_classifier.py @@ -1,8 +1,7 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from typing import Iterable, Optional, Dict, Any, Callable -from types import MappingProxyType +from dataclasses import dataclass, field +from typing import Iterable, Optional, Dict, Any from sklearn.linear_model import ( LogisticRegression, Perceptron, @@ -13,16 +12,16 @@ class LogisticRegressionTuner(BaseTuner): penalty_space: Iterable[Optional[str]] = ("l1", "l2", "elasticnet", None) dual_space: Iterable[bool] = (True, False) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) - C_space: Dict[str, Any] = MappingProxyType({"low":0.9, "high":1.0, "step":None, "log":False}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) + C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.9, "high":1.0, "step":None, "log":False}) fit_intercept_space: Iterable[bool] = (True, False) - intercept_scaling_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False}) + intercept_scaling_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False}) class_weight_space: Iterable[str] = ("balanced", ) solver_space: Iterable[str] = ("lbfgs", "liblinear", "newton-cg", "newton-cholesky", "sag", "saga") - max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True}) multi_class_space: Iterable[str] = ("auto", ) - l1_ratio_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + l1_ratio_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, 
"log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -59,17 +58,17 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class PerceptronTuner(BaseTuner): penalty_space: Iterable[Optional[str]] = ("l1", "l2", "elasticnet", None) - alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1.0, "step":None, "log":True}) - l1_ratio_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1.0, "step":None, "log":True}) + l1_ratio_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) fit_intercept_space: Iterable[bool] = (True, False) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) shuffle_space: Iterable[bool] = (True, False) - eta0_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + eta0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) early_stopping_space: Iterable[bool] = (True, False) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) + validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) class_weight_space: Iterable[str] = ("balanced", ) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: @@ -105,16 +104,16 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class PassiveAggressiveClassifierTuner(BaseTuner): - C_space: Dict[str, Any] = MappingProxyType({"low":0.9, "high":1.0, "step":None, "log":False}) + C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.9, "high":1.0, "step":None, "log":False}) fit_intercept_space: Iterable[bool] = (True, False) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) early_stopping_space: Iterable[bool] = (True, False) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) + 
validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) shuffle_space: Iterable[bool] = (True, False) loss_space: Iterable[str] = ("hinge", ) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) class_weight_space: Iterable[str] = ("balanced", ) average_space: Iterable[bool] = (True, False) @@ -161,20 +160,20 @@ class SGDClassifierTuner(BaseTuner): "epsilon_insensitive", "squared_epsilon_insensitive") penalty_space: Iterable[str] = ("l1", "l2", "elasticnet", None) - alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1.0, "step":None, "log":True}) - l1_ratio_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1.0, "step":None, "log":True}) + l1_ratio_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) fit_intercept_space: Iterable[bool] = (True, False) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) shuffle_space: Iterable[bool] = (True, False) - epsilon_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) learning_rate_space: Iterable[str] = ("constant", "optimal", "invscaling", "adaptive") - eta0_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - power_t_space: Dict[str, Any] = MappingProxyType({"low":-1.0, "high":1.0, "step":None, "log":False}) + eta0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + power_t_space: Dict[str, Any] = field(default_factory=lambda: {"low":-1.0, "high":1.0, "step":None, "log":False}) early_stopping_space: Iterable[bool] = (True, False) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) + validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) class_weight_space: Iterable[str] = ("balanced", ) average_space: Iterable[bool] = (True, False) diff --git a/tune_classifier/mlp_classifier.py b/tune_classifier/mlp_classifier.py index 755d79c..111fe90 100644 --- a/tune_classifier/mlp_classifier.py +++ b/tune_classifier/mlp_classifier.py @@ -1,35 +1,34 @@ 
from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from typing import Iterable, Optional, Dict, Any, Callable -from types import MappingProxyType +from dataclasses import dataclass, field +from typing import Iterable, Optional, Dict, Any from sklearn.neural_network import MLPClassifier @dataclass class MLPClassifierTuner(BaseTuner): - n_hidden_space: Dict[str, Any] = MappingProxyType({"low":1, "high":5, "step":1, "log":False}) - hidden_layer_sizes_space: Dict[str, Any] = MappingProxyType({"low":100, "high":200, "step":1, "log":True}) + n_hidden_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":5, "step":1, "log":False}) + hidden_layer_sizes_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":200, "step":1, "log":True}) activation_space: Iterable[str] = ("identity", "logistic", "tanh", "relu") solver_space: Iterable[str] = ("lbfgs", "sgd", "adam") - alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-4, "high":1.0, "step":None, "log":True}) - batch_size_space: Dict[str, Any] = MappingProxyType({"low":8, "high":256, "step":1, "log":True}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-4, "high":1.0, "step":None, "log":True}) + batch_size_space: Dict[str, Any] = field(default_factory=lambda: {"low":8, "high":256, "step":1, "log":True}) learning_rate_space: Iterable[str] = ("constant", "invscaling", "adaptive") - learning_rate_init_space: Dict[str, Any] = MappingProxyType({"low":1e-4, "high":1e-2, "step":None, "log":True}) - power_t_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":200, "high":1000, "step":1, "log":True}) + learning_rate_init_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-4, "high":1e-2, "step":None, "log":True}) + power_t_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":200, "high":1000, "step":1, "log":True}) shuffle_space: Iterable[bool] = (True, False) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1e-2, "step":None, "log":True}) - momentum_space: Dict[str, Any] = MappingProxyType({"low":0.9, "high":1.0, "step":None, "log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1e-2, "step":None, "log":True}) + momentum_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.9, "high":1.0, "step":None, "log":False}) nesterovs_momentum_space: Iterable[bool] = (True, False) early_stopping_space: Iterable[bool] = (True, False) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) - beta_1_space: Dict[str, Any] = MappingProxyType({"low":0.9, "high":1.0, "step":None, "log":False}) - beta_2_space: Dict[str, Any] = MappingProxyType({"low":0.9, "high":1.0, "step":None, "log":False}) - epsilon_space: Dict[str, Any] = MappingProxyType({"low":1e-8, "high":1e-5, "step":None, "log":True}) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":3, "high":50, "step":1, "log":True}) - max_fun_space: Dict[str, Any] = MappingProxyType({"low":10000, "high":20000, "step":1, "log":True}) + 
validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) + beta_1_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.9, "high":1.0, "step":None, "log":False}) + beta_2_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.9, "high":1.0, "step":None, "log":False}) + epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-8, "high":1e-5, "step":None, "log":True}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":3, "high":50, "step":1, "log":True}) + max_fun_space: Dict[str, Any] = field(default_factory=lambda: {"low":10000, "high":20000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]: super().sample_params(trial) diff --git a/tune_classifier/naive_bayes_classifier.py b/tune_classifier/naive_bayes_classifier.py index 75ab341..d66e1ec 100644 --- a/tune_classifier/naive_bayes_classifier.py +++ b/tune_classifier/naive_bayes_classifier.py @@ -1,8 +1,7 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from typing import Callable,Iterable, Optional, Dict, Any, Union -from types import MappingProxyType +from dataclasses import dataclass, field +from typing import Iterable, Optional, Dict, Any, Union from sklearn.naive_bayes import ( BernoulliNB, GaussianNB, @@ -14,7 +13,7 @@ @dataclass class GaussianNBTuner(BaseTuner): priors_space: Iterable[Optional[Iterable[float]]] = (None,) - var_smoothing_space: Dict[str, Any] = MappingProxyType({"low":1e-10, "high":1e-6, "step":None, "log":True}) + var_smoothing_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-10, "high":1e-6, "step":None, "log":True}) def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]: super().sample_params(trial) @@ -37,10 +36,10 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class BernoulliNBTuner(BaseTuner): - alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) force_alpha_space: Iterable[bool] = (True, False) set_binarize_space: Iterable[bool] = (True, False) - binarize_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + binarize_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) fit_prior_space: Iterable[bool] = (True, False) class_prior_space: Iterable[Optional[Iterable[float]]] = (None, ) #TODO: Implement array selections @@ -72,7 +71,7 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any: @dataclass class MultinomialNBTuner(BaseTuner): - alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) force_alpha_space: Iterable[bool] = (True, False) fit_prior_space: Iterable[bool] = (True, False) class_prior_space: Iterable[Optional[Iterable[float]]] = (None, ) @@ -99,7 +98,7 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any: @dataclass class ComplementNBTuner(BaseTuner): - alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) force_alpha_space: Iterable[bool] = 
(True, False) fit_prior_space: Iterable[bool] = (True, False) class_prior_space: Iterable[Optional[Iterable[float]]] = (None, ) @@ -129,7 +128,7 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any: @dataclass class CategoricalNBTuner(BaseTuner): - alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) force_alpha_space: Iterable[bool] = (True, False) fit_prior_space: Iterable[bool] = (True, False) class_prior_space: Iterable[Optional[Iterable[float]]] = (None,) diff --git a/tune_classifier/neighbor_classifier.py b/tune_classifier/neighbor_classifier.py index a73c2e3..a8c128a 100644 --- a/tune_classifier/neighbor_classifier.py +++ b/tune_classifier/neighbor_classifier.py @@ -1,18 +1,17 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from typing import Iterable, Optional, Dict, Any, Callable -from types import MappingProxyType +from dataclasses import dataclass, field +from typing import Iterable, Optional, Dict, Any from sklearn.neighbors import KNeighborsClassifier, RadiusNeighborsClassifier, NearestCentroid @dataclass class KNeighborsClassifierTuner(BaseTuner): - n_neighbors_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10, "step":2, "log":False}) + n_neighbors_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10, "step":2, "log":False}) weights_space: Iterable[str] = ("uniform", "distance") algorithm_space: Iterable[str] = ("ball_tree", "kd_tree", "brute") - leaf_size_space: Dict[str, Any] = MappingProxyType({"low":2, "high":100, "step":1, "log":True}) - p_space: Dict[str, Any] = MappingProxyType({"low":3, "high":8, "step":1, "log":False}) + leaf_size_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":100, "step":1, "log":True}) + p_space: Dict[str, Any] = field(default_factory=lambda: {"low":3, "high":8, "step":1, "log":False}) metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan", "minkowski") def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]: @@ -42,11 +41,11 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any: @dataclass class RadiusNeighborsClassifierTuner(BaseTuner): - radius_space: Dict[str, Any] = MappingProxyType({"low":2, "high":20, "step":1, "log":False}) + radius_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":20, "step":1, "log":False}) weight_space: Iterable[str] = ("uniform", "distance") algorithm_space: Iterable[str] = ("ball_tree", "kd_tree", "brute") - leaf_size_space: Dict[str, Any] = MappingProxyType({"low":2, "high":100, "step":1, "log":True}) - p_space: Dict[str, Any] = MappingProxyType({"low":3, "high":10, "step":1, "log":False}) + leaf_size_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":100, "step":1, "log":True}) + p_space: Dict[str, Any] = field(default_factory=lambda: {"low":3, "high":10, "step":1, "log":False}) metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan", "minkowski") outlier_label_space: Iterable[str] = (None, "most_frequent") @@ -79,7 +78,7 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any: @dataclass class NearestCentroidClassifierTuner(BaseTuner): metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan") - shrink_threshold_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.9, "step":None, 
"log":False}) + shrink_threshold_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.9, "step":None, "log":False}) def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]: super().sample_params(trial) diff --git a/tune_classifier/svc.py b/tune_classifier/svc.py index 3d75a8a..5956135 100644 --- a/tune_classifier/svc.py +++ b/tune_classifier/svc.py @@ -1,23 +1,21 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from typing import Iterable, Optional, Dict, Any, Callable +from dataclasses import dataclass, field +from typing import Iterable, Optional, Dict, Any from sklearn.svm import SVC, LinearSVC, NuSVC -from types import MappingProxyType - @dataclass class SVCTuner(BaseTuner): kernel_space: Iterable[str] = ("linear", "poly", "rbf", "sigmoid") - degree_space: Dict[str, Any] = MappingProxyType({"low":1, "high":5, "step":1, "log":False}) + degree_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":5, "step":1, "log":False}) gamma_space: Iterable[str] = ("scale", "auto") - coef0_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) - C_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False}) + coef0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) + C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False}) class_weight_space: Iterable[str] = ("balanced", ) shrinking_space: Iterable[bool] = (True, ) probability_space: Iterable[bool] = (True, ) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -50,14 +48,14 @@ class LinearSVCTuner(BaseTuner): penalty_space: Iterable[str] = ("l1", "l2") loss_space: Iterable[str] = ("hinge", "squared_hinge") dual_space: Iterable[bool] = (True, False) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) - C_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) + C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False}) multi_class_space: Iterable[str] = ("ovr", "crammer_singer") fit_intercept_space: Iterable[bool] = (True, False) - intercept_scaling_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False}) + intercept_scaling_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False}) class_weight_space: Iterable[str] = ("balanced", ) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":500, "high":2000, "step":1, "log":True}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":500, "high":2000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = 
field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -89,18 +87,18 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class NuSVCTuner(BaseTuner): - nu_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) + nu_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) kernel_space: Iterable[str] = ("linear", "poly", "rbf", "sigmoid") - degree_space: Dict[str, Any] = MappingProxyType({"low":1, "high":5, "step":1, "log":False}) + degree_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":5, "step":1, "log":False}) gamma_space: Iterable[str] = ("scale", "auto") - coef0_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) + coef0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False}) shrinking_space: Iterable[bool] = (True, ) probability_space: Iterable[bool] = (True, ) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) class_weight_space: Iterable[str] = ("balanced", ) decision_function_shape_space: Iterable[str] = ("ovo", "ovr") break_ties_space: Iterable[bool] = (False, ) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) diff --git a/tune_classifier/tree_classifier.py b/tune_classifier/tree_classifier.py index e69c49f..75a6ffb 100644 --- a/tune_classifier/tree_classifier.py +++ b/tune_classifier/tree_classifier.py @@ -1,8 +1,7 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass -from typing import Iterable, Optional, Dict, Any, Union, Callable -from types import MappingProxyType +from dataclasses import dataclass, field +from typing import Iterable, Optional, Dict, Any, Union from sklearn.tree import DecisionTreeClassifier, ExtraTreeClassifier @@ -10,15 +9,15 @@ class DecisionTreeClassifierTuner(BaseTuner): criterion_space: Iterable[str] = ("gini", "entropy", "log_loss") splitter_space: Iterable[str] = ("best", "random") - max_depth_space: Dict[str, Any] = MappingProxyType({"low":2, "high":1000, "step":1, "log":True}) - min_samples_split_space: Iterable[Union[int, float]] = MappingProxyType({"low":1e-4, "high":1.0, "step":None, "log":True}) - min_samples_leaf_space: Iterable[Union[int, float]] = MappingProxyType({"low":1e-4, "high":1.0, "step":None, "log":True}) - min_weight_fraction_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) + max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":1000, "step":1, "log":True}) + min_samples_split_space: Iterable[Union[int, float]] = field(default_factory=lambda: {"low":1e-4, "high":1.0, "step":None, "log":True}) + min_samples_leaf_space: Iterable[Union[int, float]] = field(default_factory=lambda: {"low":1e-4, "high":1.0, "step":None, "log":True}) + min_weight_fraction_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, 
"log":False}) max_features_space: Iterable[Optional[str]] = ("sqrt", "log2", None) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) - max_leaf_nodes_space: Dict[str, Any] = MappingProxyType({"low":2, "high":1000, "step":1, "log":True}) - min_impurity_decrease_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) - ccp_alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) + max_leaf_nodes_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":1000, "step":1, "log":True}) + min_impurity_decrease_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) + ccp_alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) class_weight_space: Iterable[Optional[str]] = ("balanced", None) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: diff --git a/tune_regressor/ensemble_regressor.py b/tune_regressor/ensemble_regressor.py index e81a65a..86ca865 100644 --- a/tune_regressor/ensemble_regressor.py +++ b/tune_regressor/ensemble_regressor.py @@ -1,8 +1,7 @@ from ..baseline import BaseTuner from optuna.trial import Trial -from dataclasses import dataclass +from dataclasses import dataclass, field from typing import Iterable, Optional, Dict, Any, Union, Callable -from types import MappingProxyType from sklearn.ensemble import ( RandomForestRegressor, ExtraTreesRegressor, @@ -15,24 +14,24 @@ @dataclass class RandomForestRegressorTuner(BaseTuner): - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":200, "step":1, "log":True}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":200, "step":1, "log":True}) criterion_space: Iterable[str] = ("squared_error", "absolute_error", "friedman_mse", "poisson") set_max_depth_space: Iterable[bool] = (True, False) - max_depth_space: Dict[str, Any] = MappingProxyType({"low":10, "high":2000, "step":1, "log":True}) - min_samples_split_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_weight_fraction_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) + max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":2000, "step":1, "log":True}) + min_samples_split_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_samples_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_weight_fraction_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False}) max_features_space: Iterable[str] = ("sqrt", "log2", None) set_max_leaf_nodes_space: Iterable[bool] = (True, False) - max_leaf_nodes_space: Dict[str, Any] = MappingProxyType({"low":2, "high":10000, "step":1, "log":True}) - min_impurity_decrease_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + max_leaf_nodes_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":10000, "step":1, "log":True}) + min_impurity_decrease_space: Dict[str, Any] = 
field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) bootstrap_space: Iterable[bool] = (True, False) oob_score_space: Iterable[bool] = (True, False) set_random_state_space: Iterable[bool] = (False, ) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) - ccp_alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) + ccp_alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) set_max_samples_space: Iterable[bool] = (True, False) - max_samples_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) + max_samples_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -115,10 +114,10 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class AdaBoostRegressorTuner(BaseTuner): estimator_space: Iterable[Optional[object]] = (None, ) - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":200, "step":1, "log":True}) - learning_rate_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1, "step":None, "log":True}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":200, "step":1, "log":True}) + learning_rate_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1, "step":None, "log":True}) loss_space: Iterable[str] = ("linear", "square", "exponential") - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -144,27 +143,27 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class GradientBoostingRegressorTuner(BaseTuner): loss_space: Iterable[str] = ("squared_error", "absolute_error", "huber", "quantile") - learning_rate_space: Dict[str, Any] = MappingProxyType({"low":0.001, "high":1.0, "step":None, "log":True}) - n_estimators_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) - subsample_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) + learning_rate_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.001, "high":1.0, "step":None, "log":True}) + n_estimators_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) + subsample_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) criterion_space: Iterable[str] = ("friedman_mse", "squared_error") - min_samples_split_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False}) - min_weight_fraction_leaf_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False}) + min_samples_split_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_samples_leaf_space: Dict[str, Any] = 
field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False}) + min_weight_fraction_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False}) set_max_depth_space: Iterable[bool] = (True, False) - max_depth_space: Dict[str, Any] = MappingProxyType({"low":10, "high":2000, "step":1, "log":True}) - min_impurity_decrease_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":2000, "step":1, "log":True}) + min_impurity_decrease_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) init_space: Iterable[Optional[object]] = (None, ) max_features_space: Iterable[str] = ("sqrt", "log2") - alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1, "step":None, "log":True}) + alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1, "step":None, "log":True}) set_max_leaf_nodes_space: Iterable[bool] = (True, False) - max_leaf_nodes_space: Iterable[Optional[int]] = MappingProxyType({"low":2, "high":10000, "step":1, "log":True}) - validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False}) + max_leaf_nodes_space: Iterable[Optional[int]] = field(default_factory=lambda: {"low":2, "high":10000, "step":1, "log":True}) + validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False}) set_n_iter_no_change_space: Iterable[bool] = (True, False) - n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True}) - random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True}) - tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True}) - ccp_alpha_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) + n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True}) + random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True}) + tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True}) + ccp_alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]: super().sample_params(trial) @@ -246,25 +245,25 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any: @dataclass class HistGradientBoostingRegressorTuner(BaseTuner): loss_space: Iterable[str] = ("squared_error", "absolute_error", "poisson", "quantile") - quantile_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False}) - learning_rate_space: Dict[str, Any] = MappingProxyType({"low":0.001, "high":1.0, "step":None, "log":True}) - max_iter_space: Dict[str, Any] = MappingProxyType({"low":10, "high":1000, "step":1, "log":True}) + quantile_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False}) + learning_rate_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.001, "high":1.0, "step":None, "log":True}) + max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":1000, "step":1, "log":True}) set_max_leaf_nodes_space: Iterable[bool] = (True, 
False)
-    max_leaf_nodes_space: Iterable[Optional[int]] = MappingProxyType({"low":2, "high":10000, "step":1, "log":True})
+    max_leaf_nodes_space: Iterable[Optional[int]] = field(default_factory=lambda: {"low":2, "high":10000, "step":1, "log":True})
     set_max_depth_space: Iterable[bool] = (True, False)
-    max_depth_space: Dict[str, Any] = MappingProxyType({"low":10, "high":2000, "step":1, "log":True})
-    min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":1, "high":200, "step":1, "log":True})
-    l2_regularization_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
-    max_bins_space: Dict[str, Any] = MappingProxyType({"low":10, "high":255, "step":1, "log":True})
+    max_depth_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":2000, "step":1, "log":True})
+    min_samples_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":200, "step":1, "log":True})
+    l2_regularization_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
+    max_bins_space: Dict[str, Any] = field(default_factory=lambda: {"low":10, "high":255, "step":1, "log":True})
     categorical_features_space: Iterable[Any] = (None, )
     monotonic_cst_space: Iterable[Any] = (None, )
     interaction_cst_space: Iterable[Any] = (None, )
     early_stopping_space: Iterable[bool] = ("auto", True, False)
     scoring_space: Iterable[Optional[Union[str, Callable]]] = ("loss", None)
-    validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False})
-    n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False})
+    n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
diff --git a/tune_regressor/linear_model_regressor.py b/tune_regressor/linear_model_regressor.py
index 2881bdb..10be43a 100644
--- a/tune_regressor/linear_model_regressor.py
+++ b/tune_regressor/linear_model_regressor.py
@@ -2,8 +2,7 @@
 from ..baseline import BaseTuner
 from optuna.trial import Trial
 from dataclasses import dataclass, field
-from typing import Iterable, Optional, Dict, Any, Union, Callable
-from types import MappingProxyType
+from typing import Iterable, Optional, Dict, Any, Union
 from sklearn.linear_model import (
     LinearRegression,
     Lasso,
@@ -55,13 +54,13 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class LassoTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1.0, "step":None, "log":True})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     positive_space: Iterable[bool] = (True, False)
     selection_space: Iterable[str] = ("cyclic", "random")
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -90,13 +89,13 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class RidgeTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1.0, "step":None, "log":True})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     solver_space: Iterable[str] = ("auto", "svd", "cholesky", "lsqr", "sparse_cg", "sag", "saga", "lbfgs")
     positive_space: Iterable[bool] = (True, False)
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -125,15 +124,15 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class ElasticNetTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1.0, "step":None, "log":True})
-    l1_ratio_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1.0, "step":None, "log":True})
+    l1_ratio_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
     fit_intercept_space: Iterable[bool] = (True, False)
     precompute_space: Iterable[Union[bool, Iterable]] = (True, False, )
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     positive_space: Iterable[bool] = (True, False)
     selection_space: Iterable[str] = ("cyclic", "random")
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -163,12 +162,12 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class MultiTaskLassoTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1.0, "step":None, "log":True})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     selection_space: Iterable[str] = ("cyclic", "random")
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
     is_multitask: str = field(init=False, default=True)

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -196,13 +195,13 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class MultiTaskElasticNetTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1.0, "step":None, "log":True})
-    l1_ratio_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1.0, "step":None, "log":True})
+    l1_ratio_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     selection_space: Iterable[str] = ("cyclic", "random")
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
     is_multitask: str = field(init=False, default=True)

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -234,11 +233,11 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:
 class LarsTuner(BaseTuner):
     fit_intercept_space: Iterable[bool] = (True, False)
     precompute_space: Iterable[bool] = (True, False)
-    n_nonzero_coefs_space: Dict[str, Any] = MappingProxyType({"low":1, "high":500, "step":1, "log":True})
-    eps_space: Dict[str, Any] = MappingProxyType({"low":np.finfo(float).eps, "high":1e-10, "step":None, "log":True})
+    n_nonzero_coefs_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":500, "step":1, "log":True})
+    eps_space: Dict[str, Any] = field(default_factory=lambda: {"low":np.finfo(float).eps, "high":1e-10, "step":None, "log":True})
     set_jitter_space: Iterable[bool] = (True, False)
-    jitter_space: Dict[str, Any] = MappingProxyType({"low":1e-8, "high":1e-3, "step":None, "log":True})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    jitter_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-8, "high":1e-3, "step":None, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -266,15 +265,15 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class LassoLarsTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False})
     fit_intercept_space: Iterable[bool] = (True, False)
     precompute_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    eps_space: Dict[str, Any] = MappingProxyType({"low":np.finfo(float).eps, "high":1e-10, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    eps_space: Dict[str, Any] = field(default_factory=lambda: {"low":np.finfo(float).eps, "high":1e-10, "step":None, "log":True})
     positive_space: Iterable[bool] = (True, False)
     set_jitter_space: Iterable[bool] = (True, False)
-    jitter_space: Dict[str, Any] = MappingProxyType({"low":1e-8, "high":1e-3, "step":None, "log":True})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    jitter_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-8, "high":1e-3, "step":None, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -307,11 +306,11 @@ class LassoLarsICTuner(BaseTuner):
     criterion_sapce: Iterable[str] = ("aic", "bic")
     fit_intercept_space: Iterable[bool] = (True, False)
     precompute_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    eps_space: Dict[str, Any] = MappingProxyType({"low":np.finfo(float).eps, "high":1e-10, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    eps_space: Dict[str, Any] = field(default_factory=lambda: {"low":np.finfo(float).eps, "high":1e-10, "step":None, "log":True})
     positive_space: Iterable[bool] = (True, False)
     set_noise_variance_space: Iterable[bool] = (True, False)
-    noise_variance_space: Dict[str, Any] = MappingProxyType({"low":1e-8, "high":1e-3, "step":None, "log":True})
+    noise_variance_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-8, "high":1e-3, "step":None, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -340,15 +339,15 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class BayesianRidgeTuner(BaseTuner):
-    n_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    alpha_1_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    alpha_2_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    lambda_1_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    lambda_2_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    n_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    alpha_1_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    alpha_2_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    lambda_1_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    lambda_2_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     set_alpha_init_space: Iterable[bool] = (True, False)
-    alpha_init_space: Iterable[bool] = MappingProxyType({"low":1e-8, "high":1.0, "step":None, "log":True})
-    lambda_init_space: Dict[str, Any] = MappingProxyType({"low":1e-8, "high":1.0, "step":None, "log":True})
+    alpha_init_space: Iterable[bool] = field(default_factory=lambda: {"low":1e-8, "high":1.0, "step":None, "log":True})
+    lambda_init_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-8, "high":1.0, "step":None, "log":True})
     compute_score_space: Iterable[bool] = (True, False)
     fit_intercept_space: Iterable[bool] = (True, False)
@@ -382,13 +381,13 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class ARDRegressionTuner(BaseTuner):
-    n_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    alpha_1_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    alpha_2_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    lambda_1_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    lambda_2_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    threshold_lambda_space: Dict[str, Any] = MappingProxyType({"low":1e3, "high":1e5, "step":None, "log":True})
+    n_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    alpha_1_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    alpha_2_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    lambda_1_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    lambda_2_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    threshold_lambda_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e3, "high":1e5, "step":None, "log":True})
     compute_score_space: Iterable[bool] = (True, False)
     fit_intercept_space: Iterable[bool] = (True, False)
@@ -418,8 +417,8 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class OrthogonalMatchingPursuitTuner(BaseTuner):
     set_nonzero_coefs_space: Iterable[bool] = (True, False)
-    n_nonzero_coefs_space: Dict[str, Any] = MappingProxyType({"low":1, "high":500, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    n_nonzero_coefs_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":500, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
     precompute_space: Iterable[bool] = (True, False)
@@ -447,17 +446,17 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class PassiveAggressiveRegressorTuner(BaseTuner):
-    C_space: Dict[str, Any] = MappingProxyType({"low":0.9, "high":1.0, "step":None, "log":False})
+    C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.9, "high":1.0, "step":None, "log":False})
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     early_stopping_space: Iterable[bool] = (True, False)
-    validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False})
-    n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":False})
+    validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False})
+    n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":False})
     shuffle_space: Iterable[bool] = (True, False)
     loss_space: Iterable[str] = ("epsilon_insensitive", )
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
-    epsilon_space: Dict[str, Any] = MappingProxyType({"low":0.05, "high":0.5, "step":None, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
+    epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.05, "high":0.5, "step":None, "log":True})
     average_space: Iterable[bool] = (True, False)

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -493,8 +492,8 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class QuantileRegressorTuner(BaseTuner):
-    quantile_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False})
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":0.01, "high":1.0, "step":None, "log":True})
+    quantile_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.01, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
     solver_space: Iterable[str] = ("highs-ds", "highs-ipm", "highs", "revised simplex")
     solver_options_space: Iterable[Optional[Dict[str, Any]]] = (None, )
@@ -528,20 +527,20 @@ class SGDRegressorTuner(BaseTuner):
                            "epsilon_insensitive", "squared_epsilon_insensitive")
     penalty_space: Iterable[str] = ("l1", "l2", "elasticnet", None)
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1.0, "step":None, "log":True})
-    l1_ratio_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1.0, "step":None, "log":True})
+    l1_ratio_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     shuffle_space: Iterable[bool] = (True, False)
-    epsilon_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
     learning_rate_space: Iterable[str] = ("constant", "optimal", "invscaling", "adaptive")
-    eta0_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":1.0, "step":None, "log":False})
-    power_t_space: Dict[str, Any] = MappingProxyType({"low":-1.0, "high":1.0, "step":None, "log":False})
+    eta0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":1.0, "step":None, "log":False})
+    power_t_space: Dict[str, Any] = field(default_factory=lambda: {"low":-1.0, "high":1.0, "step":None, "log":False})
     early_stopping_space: Iterable[bool] = (True, False)
-    validation_fraction_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False})
-    n_iter_no_change_space: Dict[str, Any] = MappingProxyType({"low":1, "high":100, "step":1, "log":False})
+    validation_fraction_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False})
+    n_iter_no_change_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":100, "step":1, "log":False})
     average_space: Iterable[bool] = (True, False)

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -582,11 +581,11 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class PoissonRegressorTuner(BaseTuner):
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1.0, "step":None, "log":True})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
     solver_space: Iterable[str] = ("lbfgs", "newton-cholesky")
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":2000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":2000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -626,13 +625,13 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class TweedieRegressorTuner(BaseTuner):
-    power_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":3.0, "step":None, "log":True})
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1.0, "step":None, "log":True})
+    power_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":3.0, "step":None, "log":True})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
     link_space: Iterable[str] = ("auto", "identity", "log")
     solver_space: Iterable[str] = ("lbfgs", "newton-cholesky")
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     model: Any = None

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -660,11 +659,11 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class HuberRegressorTuner(BaseTuner):
-    epsilon_space: Dict[str, Any] = MappingProxyType({"low":1.0, "high":10.0, "step":None, "log":True})
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    alpha_space: Dict[str, Any] = MappingProxyType({"low":1e-5, "high":1.0, "step":None, "log":True})
+    epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":1.0, "high":10.0, "step":None, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    alpha_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-5, "high":1.0, "step":None, "log":True})
     fit_intercept_space: Iterable[bool] = (True, False)
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
     model: Any = None

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -690,12 +689,12 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class TheilSenRegressorTuner(BaseTuner):
     fit_intercept_space: Iterable[bool] = (True, False)
-    max_subpopulation_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1e5, "step":1, "log":True})
+    max_subpopulation_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1e5, "step":1, "log":True})
     set_n_subsamples_space: Iterable[bool] = (False, )
-    n_subsamples_space: Optional[Dict[str, Any]] = MappingProxyType({"low":1, "high":40, "step":1, "log":True})
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":100, "high":300, "step":1, "log":True})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    n_subsamples_space: Optional[Dict[str, Any]] = field(default_factory=lambda: {"low":1, "high":40, "step":1, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":300, "step":1, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
     model: Any = None

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
@@ -726,14 +725,14 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class RANSACRegressorTuner(BaseTuner):
     estimator: Optional[Union[RegressorMixin, BaseEstimator]] = None
-    min_samples_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
-    residual_threshold_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
-    max_trials_space: Dict[str, Any] = MappingProxyType({"low":100, "high":1000, "step":1, "log":True})
-    max_skips_space: Dict[str, Any] = MappingProxyType({"low":1, "high":1e5, "step":1, "log":True})
-    stop_n_inliers_space: Dict[str, Any] = MappingProxyType({"low":1, "high":1e5, "step":1, "log":True})
-    stop_score_space: Dict[str, Any] = MappingProxyType({"low":1.0, "high":1e5, "step":None, "log":True})
-    stop_probability_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":0.99, "step":None, "log":False})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    min_samples_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
+    residual_threshold_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
+    max_trials_space: Dict[str, Any] = field(default_factory=lambda: {"low":100, "high":1000, "step":1, "log":True})
+    max_skips_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":1e5, "step":1, "log":True})
+    stop_n_inliers_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":1e5, "step":1, "log":True})
+    stop_score_space: Dict[str, Any] = field(default_factory=lambda: {"low":1.0, "high":1e5, "step":None, "log":True})
+    stop_probability_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":0.99, "step":None, "log":False})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
     model: Any = None

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
diff --git a/tune_regressor/mlp_regressor.py b/tune_regressor/mlp_regressor.py
index 934e16f..131418f 100644
--- a/tune_regressor/mlp_regressor.py
+++ b/tune_regressor/mlp_regressor.py
@@ -1,6 +1,6 @@
 from optuna.trial import Trial
 from dataclasses import dataclass
-from typing import Optional, Dict, Any, Callable
+from typing import Optional, Dict, Any
 from sklearn.neural_network import MLPRegressor
 from ..tune_classifier import MLPClassifierTuner

diff --git a/tune_regressor/neighbor_regressor.py b/tune_regressor/neighbor_regressor.py
index 6a5d038..0ce590a 100644
--- a/tune_regressor/neighbor_regressor.py
+++ b/tune_regressor/neighbor_regressor.py
@@ -1,8 +1,7 @@
 from ..baseline import BaseTuner
 from optuna.trial import Trial
-from dataclasses import dataclass
-from typing import Iterable, Optional, Dict, Any, Callable
-from types import MappingProxyType
+from dataclasses import dataclass, field
+from typing import Iterable, Optional, Dict, Any
 from sklearn.neighbors import KNeighborsRegressor, RadiusNeighborsRegressor
 from ..tune_classifier import KNeighborsClassifierTuner

@@ -25,11 +24,11 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any:

 @dataclass
 class RadiusNeighborsRegressorTuner(BaseTuner):
-    radius_space: Dict[str, Any] = MappingProxyType({"low":2, "high":20, "step":1, "log":False})
+    radius_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":20, "step":1, "log":False})
     weight_space: Iterable[str] = ("uniform", "distance")
     algorithm_space: Iterable[str] = ("ball_tree", "kd_tree", "brute")
-    leaf_size_space: Dict[str, Any] = MappingProxyType({"low":2, "high":100, "step":1, "log":True})
-    p_space: Dict[str, Any] = MappingProxyType({"low":3, "high":10, "step":1, "log":False})
+    leaf_size_space: Dict[str, Any] = field(default_factory=lambda: {"low":2, "high":100, "step":1, "log":True})
+    p_space: Dict[str, Any] = field(default_factory=lambda: {"low":3, "high":10, "step":1, "log":False})
     metric_space: Iterable[str] = ("cityblock", "cosine", "euclidean", "manhattan", "minkowski")

     def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:
diff --git a/tune_regressor/svr.py b/tune_regressor/svr.py
index 017e85d..59a6751 100644
--- a/tune_regressor/svr.py
+++ b/tune_regressor/svr.py
@@ -1,21 +1,20 @@
 from ..baseline import BaseTuner
 from optuna.trial import Trial
-from dataclasses import dataclass
-from typing import Iterable, Optional, Dict, Any, Callable
-from types import MappingProxyType
+from dataclasses import dataclass, field
+from typing import Iterable, Optional, Dict, Any
 from sklearn.svm import SVR, LinearSVR, NuSVR


 @dataclass
 class SVRTuner(BaseTuner):
     kernel_space: Iterable[str] = ("linear", "poly", "rbf", "sigmoid")
-    degree_space: Dict[str, Any] = MappingProxyType({"low":1, "high":5, "step":1, "log":False})
+    degree_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":5, "step":1, "log":False})
     gamma_space: Iterable[str] = ("scale", "auto")
-    coef0_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    C_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False})
+    coef0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False})
     shrinking_space: Iterable[bool] = (True, )
-    epsilon_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False})
+    epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False})

     def sample_params(self, trial: Optional[Trial] = None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -43,15 +42,15 @@ def sample_model(self, trial: Optional[Trial] = None) -> Any:

 @dataclass
 class LinearSVRTuner(BaseTuner):
-    epsilon_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False})
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
-    C_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False})
+    epsilon_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})
+    C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False})
     loss_space: Iterable[str] = ("epsilon_insensitive", "squared_epsilon_insensitive")
     fit_intercept_space: Iterable[bool] = (True, False)
-    intercept_scaling_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False})
+    intercept_scaling_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False})
     dual_space: Iterable[bool] = (True, False)
-    max_iter_space: Dict[str, Any] = MappingProxyType({"low":500, "high":2000, "step":1, "log":True})
-    random_state_space: Dict[str, Any] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
+    max_iter_space: Dict[str, Any] = field(default_factory=lambda: {"low":500, "high":2000, "step":1, "log":True})
+    random_state_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
@@ -80,14 +79,14 @@ def sample_model(self, trial: Optional[Trial]=None) -> Any:

 @dataclass
 class NuSVRTuner(BaseTuner):
-    nu_space: Dict[str, Any] = MappingProxyType({"low":0.1, "high":0.5, "step":None, "log":False})
-    C_space: Dict[str, Any] = MappingProxyType({"low":0.5, "high":1.0, "step":None, "log":False})
+    nu_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.1, "high":0.5, "step":None, "log":False})
+    C_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.5, "high":1.0, "step":None, "log":False})
     kernel_space: Iterable[str] = ("linear", "poly", "rbf", "sigmoid")
-    degree_space: Dict[str, Any] = MappingProxyType({"low":1, "high":5, "step":1, "log":False})
+    degree_space: Dict[str, Any] = field(default_factory=lambda: {"low":1, "high":5, "step":1, "log":False})
     gamma_space: Iterable[str] = ("scale", "auto")
-    coef0_space: Dict[str, Any] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False})
+    coef0_space: Dict[str, Any] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False})
     shrinking_space: Iterable[bool] = (True, )
-    tol_space: Dict[str, Any] = MappingProxyType({"low":1e-6, "high":1e-3, "step":None, "log":True})
+    tol_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-6, "high":1e-3, "step":None, "log":True})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)
diff --git a/tune_regressor/tree_regressor.py b/tune_regressor/tree_regressor.py
index 74146c7..0156642 100644
--- a/tune_regressor/tree_regressor.py
+++ b/tune_regressor/tree_regressor.py
@@ -1,8 +1,7 @@
 from ..baseline import BaseTuner
 from optuna.trial import Trial
-from dataclasses import dataclass
-from typing import Iterable, Optional, Dict, Any, Union, Callable
-from types import MappingProxyType
+from dataclasses import dataclass, field
+from typing import Iterable, Optional, Dict, Any
 from sklearn.tree import DecisionTreeRegressor, ExtraTreeRegressor


@@ -10,15 +9,15 @@
 class DecisionTreeRegressorTuner(BaseTuner):
     criterion_space: Iterable[str] = ("squared_error", "friedman_mse", "absolute_error", "poisson")
     splitter_space: Iterable[str] = ("best", "random")
-    max_depth_space: Iterable[int] = MappingProxyType({"low":2, "high":1000, "step":1, "log":True})
-    min_samples_split_space: Dict[str, Any] = MappingProxyType({"low":1e-4, "high":1.0, "step":None, "log":True})
-    min_samples_leaf_space: Dict[str, Any] = MappingProxyType({"low":1e-4, "high":1.0, "step":None, "log":True})
-    min_weight_fraction_leaf_space: Iterable[float] = MappingProxyType({"low":0.0, "high":0.5, "step":None, "log":False})
+    max_depth_space: Iterable[int] = field(default_factory=lambda: {"low":2, "high":1000, "step":1, "log":True})
+    min_samples_split_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-4, "high":1.0, "step":None, "log":True})
+    min_samples_leaf_space: Dict[str, Any] = field(default_factory=lambda: {"low":1e-4, "high":1.0, "step":None, "log":True})
+    min_weight_fraction_leaf_space: Iterable[float] = field(default_factory=lambda: {"low":0.0, "high":0.5, "step":None, "log":False})
     max_features_space: Iterable[Optional[str]] = ("sqrt", "log2", None)
-    random_state_space: Iterable[int] = MappingProxyType({"low":1, "high":10000, "step":1, "log":True})
-    max_leaf_nodes_space: Iterable[int] = MappingProxyType({"low":2, "high":1000, "step":1, "log":True})
-    min_impurity_decrease_space: Iterable[float] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
-    ccp_alpha_space: Iterable[float] = MappingProxyType({"low":0.0, "high":1.0, "step":None, "log":False})
+    random_state_space: Iterable[int] = field(default_factory=lambda: {"low":1, "high":10000, "step":1, "log":True})
+    max_leaf_nodes_space: Iterable[int] = field(default_factory=lambda: {"low":2, "high":1000, "step":1, "log":True})
+    min_impurity_decrease_space: Iterable[float] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})
+    ccp_alpha_space: Iterable[float] = field(default_factory=lambda: {"low":0.0, "high":1.0, "step":None, "log":False})

     def sample_params(self, trial: Optional[Trial]=None) -> Dict[str, Any]:
         super().sample_params(trial)