@@ -29,9 +29,8 @@ def _get_data(self):
         X = np.random.normal(size=(1000, 3))
         T = np.random.binomial(2, scipy.special.expit(X[:, 0]))
         sigma = 0.001
-        y = (1 + .5 * X[:, 0]) * T + X[:, 0] + np.random.normal(0, sigma, size=(1000,))
+        y = (1 + .5 * X[:, 0]) * T + X[:, 0] + np.random.normal(0, sigma, size=(1000,))
         return y, T, X, X[:, 0]
-
 
     def test_comparison(self):
         def reg():
@@ -53,7 +52,7 @@ def clf():
                   ('dalearner', DomainAdaptationLearner(models=reg(), final_models=reg(), propensity_model=clf())),
                   ('slearner', SLearner(overall_model=reg())),
                   ('tlearner', TLearner(models=reg())),
-                  ('drlearner', DRLearner(model_propensity='auto',model_regression='auto',
+                  ('drlearner', DRLearner(model_propensity='auto', model_regression='auto',
                                           model_final=reg(), cv=3)),
                   ('rlearner', NonParamDML(model_y=reg(), model_t=clf(), model_final=reg(),
                                            discrete_treatment=True, cv=3)),
@@ -72,8 +71,8 @@ def clf():
                          multitask_model_final=False,
                          featurizer=None,
                          min_propensity=1e-6,
-                         cv=3,
-                         mc_iters=2,
+                         cv=3,
+                         mc_iters=2,
                          mc_agg='median')
         scorer.fit(Y_val, T_val, X=X_val)
         rscore = [scorer.score(mdl) for _, mdl in models]
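
For context on the pattern this diff exercises, below is a minimal, self-contained sketch of ranking CATE estimators with econml's RScorer through the same scorer.fit / scorer.score calls; the data, estimator choices, and hyperparameters are illustrative assumptions, not this PR's actual test code.

# Illustrative sketch only (not the PR's test): compare CATE models with econml's RScorer.
import numpy as np
import scipy.special
from sklearn.ensemble import GradientBoostingClassifier, GradientBoostingRegressor
from sklearn.model_selection import train_test_split
from econml.dr import DRLearner
from econml.metalearners import SLearner, TLearner
from econml.score import RScorer

# Synthetic data in the same spirit as _get_data above (binary treatment for simplicity).
X = np.random.normal(size=(1000, 3))
T = np.random.binomial(1, scipy.special.expit(X[:, 0]))
y = (1 + .5 * X[:, 0]) * T + X[:, 0] + np.random.normal(0, 0.001, size=(1000,))
X_tr, X_val, T_tr, T_val, Y_tr, Y_val = train_test_split(X, T, y, test_size=0.4)

# Candidate CATE estimators, each fit on the training split.
models = [('slearner', SLearner(overall_model=GradientBoostingRegressor())),
          ('tlearner', TLearner(models=GradientBoostingRegressor())),
          ('drlearner', DRLearner(model_final=GradientBoostingRegressor(), cv=3))]
for _, mdl in models:
    mdl.fit(Y_tr, T_tr, X=X_tr)

# Fit the scorer's nuisance models on the validation split, then score each candidate.
scorer = RScorer(model_y=GradientBoostingRegressor(), model_t=GradientBoostingClassifier(),
                 discrete_treatment=True, cv=3, mc_iters=2, mc_agg='median')
scorer.fit(Y_val, T_val, X=X_val)
rscore = [scorer.score(mdl) for _, mdl in models]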