|
64 | 64 | "# save the model\n",
|
65 | 65 | "model.save_pretrained(\n",
|
66 | 66 | " \"saved-model-dir/unet-with-metadata/\",\n",
|
67 |
| - "\n", |
68 | 67 | " # additional information to be saved with the model\n",
|
69 | 68 | " # only \"dataset\" and \"metrics\" are supported\n",
|
70 | 69 | " dataset=\"PASCAL VOC\", # only string name is supported\n",
|
71 |
| - " metrics={ # should be a dictionary with metric name as key and metric value as value\n", |
| 70 | + " metrics={ # should be a dictionary with metric name as key and metric value as value\n", |
72 | 71 | " \"mIoU\": 0.95,\n",
|
73 |
| - " \"accuracy\": 0.96\n", |
74 |
| - " }\n", |
| 72 | + " \"accuracy\": 0.96,\n", |
| 73 | + " },\n", |
75 | 74 | ")"
|
76 | 75 | ]
|
77 | 76 | },
|
|
222 | 221 | "# save the model and share it on the HF Hub (https://huggingface.co/models)\n",
|
223 | 222 | "model.save_pretrained(\n",
|
224 | 223 | " \"qubvel-hf/unet-with-metadata/\",\n",
|
225 |
| - " push_to_hub=True, # <---------- push the model to the hub\n", |
226 |
| - " private=False, # <---------- make the model private or or public\n", |
| 224 | + " push_to_hub=True, # <---------- push the model to the hub\n", |
| 225 | + "    private=False,  # <---------- make the model private or public\n", |
227 | 226 | " dataset=\"PASCAL VOC\",\n",
|
228 |
| - " metrics={\n", |
229 |
| - " \"mIoU\": 0.95,\n", |
230 |
| - " \"accuracy\": 0.96\n", |
231 |
| - " }\n", |
| 227 | + " metrics={\"mIoU\": 0.95, \"accuracy\": 0.96},\n", |
232 | 228 | ")\n",
|
233 | 229 | "\n",
|
234 | 230 | "# see result here https://huggingface.co/qubvel-hf/unet-with-metadata"
|
|
267 | 263 | "outputs": [],
|
268 | 264 | "source": [
|
269 | 265 | "# define a preprocessing transform for image that would be used during inference\n",
|
270 |
| - "preprocessing_transform = A.Compose([\n", |
271 |
| - " A.Resize(256, 256),\n", |
272 |
| - " A.Normalize()\n", |
273 |
| - "])\n", |
| 266 | + "preprocessing_transform = A.Compose([A.Resize(256, 256), A.Normalize()])\n", |
274 | 267 | "\n",
|
275 | 268 | "model = smp.Unet()"
|
276 | 269 | ]
|
|
367 | 360 | "# You can also save training augmentations to the Hub too (and load it back)!\n",
|
368 | 361 | "#! Just make sure to provide key=\"train\" when saving and loading the augmentations.\n",
|
369 | 362 | "\n",
|
370 |
| - "train_augmentations = A.Compose([\n", |
371 |
| - " A.HorizontalFlip(p=0.5),\n", |
372 |
| - " A.RandomBrightnessContrast(p=0.2),\n", |
373 |
| - " A.ShiftScaleRotate(p=0.5),\n", |
374 |
| - "])\n", |
| 363 | + "train_augmentations = A.Compose(\n", |
| 364 | + " [\n", |
| 365 | + " A.HorizontalFlip(p=0.5),\n", |
| 366 | + " A.RandomBrightnessContrast(p=0.2),\n", |
| 367 | + " A.ShiftScaleRotate(p=0.5),\n", |
| 368 | + " ]\n", |
| 369 | + ")\n", |
375 | 370 | "\n",
|
376 |
| - "train_augmentations.save_pretrained(directory_or_repo_on_the_hub, key=\"train\", push_to_hub=True)\n", |
| 371 | + "train_augmentations.save_pretrained(\n", |
| 372 | + " directory_or_repo_on_the_hub, key=\"train\", push_to_hub=True\n", |
| 373 | + ")\n", |
377 | 374 | "\n",
|
378 |
| - "restored_train_augmentations = A.Compose.from_pretrained(directory_or_repo_on_the_hub, key=\"train\")\n", |
| 375 | + "restored_train_augmentations = A.Compose.from_pretrained(\n", |
| 376 | + " directory_or_repo_on_the_hub, key=\"train\"\n", |
| 377 | + ")\n", |
379 | 378 | "print(restored_train_augmentations)"
|
380 | 379 | ]
|
381 | 380 | },
|
|
0 commit comments