File tree Expand file tree Collapse file tree 1 file changed +5
-1
lines changed Expand file tree Collapse file tree 1 file changed +5
-1
lines changed Original file line number Diff line number Diff line change @@ -72,6 +72,7 @@ def load_model_emb(args, tokenizer):
72
72
### random emb or pre-defined embedding like glove embedding. You can customize your own init here.
73
73
model = torch.nn.Embedding(tokenizer.vocab_size, args.hidden_dim)
74
74
path_save = '{}/random_emb.torch'.format(args.checkpoint_path)
75
+ path_save_ind = path_save + ".done"
75
76
if int(os.environ['LOCAL_RANK']) == 0:
76
77
if os.path.exists(path_save):
77
78
print('reload the random embeddings', model)
@@ -80,8 +81,11 @@ def load_model_emb(args, tokenizer):
80
81
print('initializing the random embeddings', model)
81
82
torch.nn.init.normal_(model.weight)
82
83
torch.save(model.state_dict(), path_save)
84
+ os.sync()
85
+ with open(path_save_ind, "x") as _:
86
+ pass
83
87
else:
84
- while not os.path.exists(path_save):
88
+ while not os.path.exists(path_save_ind):
85
89
time.sleep(1)
86
90
print('reload the random embeddings', model)
87
91
model.load_state_dict(torch.load(path_save))
You can’t perform that action at this time.
0 commit comments