@@ -552,6 +552,9 @@ def load_lora_model(self, model, lora, strength_model):
552552
553553
554554class SaveLoRA :
555+ def __init__ (self ):
556+ self .output_dir = folder_paths .get_output_directory ()
557+
555558 @classmethod
556559 def INPUT_TYPES (s ):
557560 return {
@@ -565,7 +568,7 @@ def INPUT_TYPES(s):
565568 "prefix" : (
566569 "STRING" ,
567570 {
568- "default" : "trained_lora " ,
571+                "default": "loras/ComfyUI_trained_lora",
569572 "tooltip" : "The prefix to use for the saved LoRA file." ,
570573 },
571574 ),
@@ -588,12 +591,13 @@ def INPUT_TYPES(s):
588591 OUTPUT_NODE = True
589592
590593 def save (self , lora , prefix , steps = None ):
591- date = datetime . datetime . now (). strftime ( "%Y%m%d_%H%M%S" )
594+ full_output_folder , filename , counter , subfolder , filename_prefix = folder_paths . get_save_image_path ( prefix , self . output_dir )
592595 if steps is None :
593- output_file = f"models/loras/ { prefix } _{ date } _lora .safetensors"
596+ output_checkpoint = f"{ filename } _{ counter :05 } _ .safetensors"
594597 else :
595- output_file = f"models/loras/{ prefix } _{ steps } _steps_{ date } _lora.safetensors"
596- safetensors .torch .save_file (lora , output_file )
598+ output_checkpoint = f"{ filename } _{ steps } _steps_{ counter :05} _.safetensors"
599+ output_checkpoint = os .path .join (full_output_folder , output_checkpoint )
600+ safetensors .torch .save_file (lora , output_checkpoint )
597601 return {}
598602
599603
0 commit comments