2 files changed: 0 additions, 16 deletions

@@ -52,12 +52,6 @@ def __init__(self):
         self.fp16 = False           # Whether to use half-precision floating point
         self.zero_init_vf = True    # Set all initial weights for value function head to zero
         self.small_init_pi = False  # Set initial weights for policy head to small values and biases to zero
-        self.spatial_attn = False                # Use parametric spatial attention
-        self.item_item_spatial_attn = False      # Use parametric spatial attention on item<->item self attention layer
-        self.spatial_attn_lr_multiplier = 1.0    # Increased learning rate for parametric spatial attention parameters
-        self.spatial_attn_scale = 10000.0
-        self.spatial_attn_init_scale = 0.1
-        self.item_item_spatial_attn_vf = False

         self.resume_from = ''       # Filepath to saved policy
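The removed spatial_attn_lr_multiplier was documented as an increased learning rate for the parametric spatial attention parameters. A common way to realize such a flag is a separate optimizer parameter group; the sketch below assumes a PyTorch policy whose spatial-attention parameters can be recognized by a gattn name prefix, and an hps.lr base learning rate — both assumptions for illustration, not taken from this diff.

import torch

def build_optimizer(policy: torch.nn.Module, hps) -> torch.optim.Optimizer:
    # Split parameters into the (assumed) parametric spatial attention group
    # and everything else, so the former can use a scaled learning rate.
    attn_params, other_params = [], []
    for name, param in policy.named_parameters():
        if name.startswith('gattn'):   # assumed naming, matching the metrics logged below
            attn_params.append(param)
        else:
            other_params.append(param)
    return torch.optim.Adam([
        {'params': other_params, 'lr': hps.lr},
        {'params': attn_params, 'lr': hps.lr * hps.spatial_attn_lr_multiplier},
    ])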
@@ -457,16 +457,6 @@ def train(hps: HyperParams, out_dir: str) -> None:
             metrics[f'build_{action}'] = count
         for action, fraction in normalize(buildmean).items():
             metrics[f'frac_{action}'] = fraction
-        if hps.spatial_attn:
-            for i in range(hps.nhead):
-                metrics[f'gattn.mean[{i}]'] = policy.gattn.mean[i]
-                metrics[f'gattn.logvariance[{i}]'] = policy.gattn.logvariance[i]
-                metrics[f'gattn.weight[{i}]'] = policy.gattn.weight[i]
-        if hps.item_item_spatial_attn:
-            for i in range(hps.nhead):
-                metrics[f'gattn_ii.mean[{i}]'] = policy.gattn_ii.mean[i]
-                metrics[f'gattn_ii.logvariance[{i}]'] = policy.gattn_ii.logvariance[i]
-                metrics[f'gattn_ii.weight[{i}]'] = policy.gattn_ii.weight[i]

         metrics.update(adr.metrics())
         total_norm = 0.0
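The deleted logging read per-head mean, logvariance, and weight values off policy.gattn (and policy.gattn_ii for the item<->item layer). A minimal sketch of what such a parametric spatial attention bias could look like is given below; the Gaussian distance-based formulation, the tensor shapes, and the use of spatial_attn_scale as a distance normalizer are all assumptions inferred from the parameter names, not taken from the repository.

import torch
import torch.nn as nn

class GaussianSpatialAttention(nn.Module):
    """Hypothetical per-head Gaussian bias over pairwise distances.

    Exposes .mean, .logvariance and .weight with one entry per attention
    head, matching the metric names that train() used to log.
    """
    def __init__(self, nhead: int, init_scale: float = 0.1, scale: float = 10000.0):
        super().__init__()
        self.scale = scale  # assumed role of spatial_attn_scale: normalize raw distances
        self.mean = nn.Parameter(torch.zeros(nhead))
        self.logvariance = nn.Parameter(torch.zeros(nhead))
        self.weight = nn.Parameter(torch.full((nhead,), init_scale))

    def forward(self, distances: torch.Tensor) -> torch.Tensor:
        # distances: (batch, items, items) pairwise distances between entities.
        # Returns an additive attention bias of shape (batch, nhead, items, items).
        d = distances.unsqueeze(1) / self.scale       # broadcast over heads
        mean = self.mean.view(1, -1, 1, 1)
        var = self.logvariance.exp().view(1, -1, 1, 1)
        weight = self.weight.view(1, -1, 1, 1)
        return weight * torch.exp(-0.5 * (d - mean) ** 2 / var)

With a module of this shape, the removed lines would simply have read off policy.gattn.mean[i], policy.gattn.logvariance[i], and policy.gattn.weight[i] for each of the hps.nhead heads.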