From 19156038df75b775816eeb5c83384717971beb5a Mon Sep 17 00:00:00 2001
From: Jyotirmay Khavasi <81622504+theory-in-progress@users.noreply.github.com>
Date: Sat, 24 Jun 2023 02:40:14 +0530
Subject: [PATCH] Merge common and specific code-templates (#260)

* Added functionality for merging common and specific templates

  - Merges the common template into the specific template
  - Uses `replace` in JS to find the tag and substitute the common code
  - Uses ejs to render

* Merge common and specific code-templates

  - Modified the code in fetchTemplates so that live updates from Vue in genCode are not overwritten
  - Dynamically fetches the common code if the file exists, else proceeds with the code from the specific file
  - Created a new function mergeCode to merge the code from the specific and common templates using ejs, the code tag being `#:::- replace_here :::#`
  - The function takes the specific file and the common file as strings and returns the rendered code using ejs
  - The code tag in the specific template is replaced with the code from the common template (see the sketch after this list)
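For reference, the merge itself is a plain string substitution over the fetched template text; the tag shown uses its final name `#::= from_template_common ::#` (the `replace_here` tag above was renamed later in this PR). A minimal self-contained sketch — only `mergeCode` mirrors the src/store.js diff below; the sample YAML strings are made up for illustration:

```js
// Mirrors mergeCode from the src/store.js diff below: every occurrence of
// the code tag in the specific template is replaced by the common code.
// At this point the tag is a literal marker, not an ejs construct.
function mergeCode(specificFileText, commonFileText) {
  return specificFileText.replace(
    /#::= from_template_common ::#/g,
    commonFileText
  )
}

// Made-up template strings, for illustration only:
const specific = [
  '#::= from_template_common ::#',
  'model: resnet18',
  'lr: 0.0001'
].join('\n')
const common = ['seed: 666', 'max_epochs: 20'].join('\n')

console.log(mergeCode(specific, common))
// seed: 666
// max_epochs: 20
// model: resnet18
// lr: 0.0001
```

ejs then renders the merged string, so the common code goes through the same `#::: if ... :::#` conditionals as the specific code.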
* Handle the error when a code-tag is present in a file but the file is missing in template-common

  - The if/else statements throw an error when the replace ejs code-tag is present in the specific file but the corresponding file is missing from the common template
  - This handles the error by replacing the replace_here tag with an empty string, since the tag is not defined for the second render (sketched below)
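Sketch of how that fallback plays out in fetchTemplates, adapted from the src/store.js diff below. The real function reads `url`, `templates` and `files` from module scope, so the explicit parameters here are an assumption made to keep the sketch self-contained; the empty-string response for a missing common file is the behaviour this PR describes, not a general `fetch` guarantee:

```js
// Adapted from the fetchTemplates diff in src/store.js.
async function fetchTemplates(template, url, templates, files) {
  files[template] = {}
  for (const filename of templates[template]) {
    const response = await fetch(`${url}/${template}/${filename}`)
    const textSpecific = await response.text()
    // If `filename` has no counterpart under template-common, this fetch
    // resolves to an empty string (per the PR description), so mergeCode
    // replaces the code tag with nothing instead of throwing.
    const resCommon = await fetch(`${url}/template-common/${filename}`)
    const textCommon = await resCommon.text()
    files[template][filename] = mergeCode(textSpecific, textCommon)
  }
}
```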
* Update branch with changes from main

* chore(deps): bump playwright-chromium from 1.33.0 to 1.35.1 (#255)

  Bumps [playwright-chromium](https://github.com/Microsoft/playwright) from 1.33.0 to 1.35.1.
  - [Release notes](https://github.com/Microsoft/playwright/releases)
  - [Commits](https://github.com/Microsoft/playwright/compare/v1.33.0...v1.35.1)

  ---
  updated-dependencies:
  - dependency-name: playwright-chromium
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...

  Signed-off-by: dependabot[bot]
  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* chore(deps): bump semver from 7.3.5 to 7.5.2 (#254)

  Bumps [semver](https://github.com/npm/node-semver) from 7.3.5 to 7.5.2.
  - [Release notes](https://github.com/npm/node-semver/releases)
  - [Changelog](https://github.com/npm/node-semver/blob/main/CHANGELOG.md)
  - [Commits](https://github.com/npm/node-semver/compare/v7.3.5...v7.5.2)

  ---
  updated-dependencies:
  - dependency-name: semver
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...

  Signed-off-by: dependabot[bot]
  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
  Co-authored-by: vfdev

* chore(deps): bump ejs from 3.1.6 to 3.1.9 (#253)

  Bumps [ejs](https://github.com/mde/ejs) from 3.1.6 to 3.1.9.
  - [Release notes](https://github.com/mde/ejs/releases)
  - [Commits](https://github.com/mde/ejs/compare/v3.1.6...v3.1.9)

  ---
  updated-dependencies:
  - dependency-name: ejs
    dependency-type: direct:production
    update-type: version-update:semver-patch
  ...

  Signed-off-by: dependabot[bot]
  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* chore(deps): bump prismjs from 1.26.0 to 1.29.0 (#245)

  Bumps [prismjs](https://github.com/PrismJS/prism) from 1.26.0 to 1.29.0.
  - [Release notes](https://github.com/PrismJS/prism/releases)
  - [Changelog](https://github.com/PrismJS/prism/blob/master/CHANGELOG.md)
  - [Commits](https://github.com/PrismJS/prism/compare/v1.26.0...v1.29.0)

  ---
  updated-dependencies:
  - dependency-name: prismjs
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...

  Signed-off-by: dependabot[bot]
  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>

* chore(deps): bump @vitejs/plugin-vue from 2.1.0 to 2.3.4 (#251)

  Bumps [@vitejs/plugin-vue](https://github.com/vitejs/vite-plugin-vue/tree/HEAD/packages/plugin-vue) from 2.1.0 to 2.3.4.
  - [Release notes](https://github.com/vitejs/vite-plugin-vue/releases)
  - [Changelog](https://github.com/vitejs/vite-plugin-vue/blob/main/packages/plugin-vue/CHANGELOG.md)
  - [Commits](https://github.com/vitejs/vite-plugin-vue/commits/HEAD/packages/plugin-vue)

  ---
  updated-dependencies:
  - dependency-name: "@vitejs/plugin-vue"
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...

  Signed-off-by: dependabot[bot]
  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
  Co-authored-by: vfdev

* Restructured config (#243)

  - Restructured config so that arguments are defined in config.yaml
  - Fix main.py
  - Modified tests according to new config restructuring
  - Updating remaining templates with restructured config
  - Update according to original config args
  - Configs for all the templates: created new yaml files holding the test args that are run by the tests
  - Modified tests according to new config structure
  - Fix typo
  - Correct backend argument to be passed on the command line
  - Pass backend argument as a command-line argument
  - Modifying the config structure in template-common

  ---------

  Co-authored-by: vfdev

* chore(deps): bump prettier from 2.5.1 to 2.8.8 (#259)

  Bumps [prettier](https://github.com/prettier/prettier) from 2.5.1 to 2.8.8.
  - [Release notes](https://github.com/prettier/prettier/releases)
  - [Changelog](https://github.com/prettier/prettier/blob/main/CHANGELOG.md)
  - [Commits](https://github.com/prettier/prettier/compare/2.5.1...2.8.8)

  ---
  updated-dependencies:
  - dependency-name: prettier
    dependency-type: direct:production
    update-type: version-update:semver-minor
  ...

  Signed-off-by: dependabot[bot]
  Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
  Co-authored-by: vfdev

---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: vfdev

* Code format

* Changed code tag to `from_template_common`

* Refactoring the redundant/repeating code in utils.py
  - Removes the repeated code from the specific templates
  - The common code is stored in src/templates/template-common

* Fix lint of utils.py

* Refactoring the redundant/repeating code in config.yaml
  - Removes the repeated code from the specific templates
  - The common code is stored in src/templates/template-common

* Refactoring the redundant/repeating code in main.py
  - Removes the repeated code from the specific templates
  - The common code is stored in src/templates/template-common

* Modify lint options
  - added min_lint
  - flake8 tests now run on the rendered code in dist-tests/

* Deleting the script check_copies.py and the command in the workflow
  - We no longer need the script since the common and specific code no longer intersect

* Add lint in tests and modify min_lint in lint

* Modify render with replace using JS on the vision-classification template

* Install formatting tools in the tests job

* Change code tags from `#:::- from_template_common :::#` to `#::= from_template_common ::#` and handle flake8 import errors
  - Changed the code tags to match `.replace()`
  - Handled the flake8 import errors by wrapping imports in if/else statements rendered with ejs

* Modifying trainers.py to include the if/else statements for unused imports

* Removes any usort skip statements in the final code
  - Making the replacement global (see the sketch after this list)

* Added usort skip statements for certain imports in trainers.py
  - Formatting

* Modified tests, imports, workflow
  - Modified tests to check that the rendered code is present and unzipped
  - Modified imports to include `Engine`
  - Modified the workflow so that the `dist-tests` dir is removed

* Added type hints for functions in template-vision-segmentation/trainers.py

* Formatting modifications
  - added __DEV_CONFIG__.json to .prettierignore
  - start dev server on port 5000
  - add F821 (non-imported objects) to flake8 tests
  - utils formatting
  - trainers formatting
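The global usort-skip replacement happens in genCode's post-render cleanup. A small sketch of just that step, with the two regexes taken from the src/store.js diff below and a made-up input line:

```js
// Condensed from the genCode diff in src/store.js: after ejs has rendered
// the merged template, leftover `    #` markers and usort skip pragmas are
// stripped from the generated code. The `g` flag makes the usort
// replacement global, i.e. every occurrence is removed, not just the first.
function cleanRendered(code) {
  return code
    .replace(/\s{4}#$/gim, '') // trim trailing ` #` left by template tags
    .replace(/ # usort: skip/g, '') // drop usort skip markers everywhere
}

// Made-up rendered fragment, for illustration only:
const rendered = 'from ignite.handlers import Checkpoint # usort: skip\n    #'
console.log(JSON.stringify(cleanRendered(rendered)))
// => "from ignite.handlers import Checkpoint\n"
```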
---------

Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: vfdev
---
 .github/workflows/ci.yml | 5 +-
 .prettierignore | 1 +
 package.json | 5 +-
 scripts/check_copies.py | 42 ----
 scripts/run_code_style.sh | 6 +-
 src/store.js | 19 +-
 src/templates/template-common/main.py | 27 +--
 src/templates/template-common/utils.py | 91 +++------
 .../template-text-classification/config.yaml | 53 +----
 .../template-text-classification/main.py | 26 +--
 .../template-text-classification/utils.py | 187 +-----------------
 .../config.yaml | 53 +----
 .../template-vision-classification/main.py | 26 +--
 .../template-vision-classification/utils.py | 187 +-----------------
 .../template-vision-dcgan/config.yaml | 53 +----
 src/templates/template-vision-dcgan/main.py | 26 +--
 src/templates/template-vision-dcgan/utils.py | 187 +-----------------
 .../template-vision-segmentation/config.yaml | 53 +----
 .../template-vision-segmentation/main.py | 26 +--
 .../template-vision-segmentation/trainers.py | 4 +-
 .../template-vision-segmentation/utils.py | 186 +----------------
 21 files changed, 72 insertions(+), 1191 deletions(-)
 delete mode 100644 scripts/check_copies.py
diff --git a/.github/workflows/ci.yml
b/.github/workflows/ci.yml index 88d8099b..400067f1 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,6 +70,7 @@ jobs: pip uninstall -y tqdm npm install -g pnpm pnpm i --frozen-lockfile --color + bash scripts/run_code_style.sh install # Show all installed dependencies pip list @@ -81,6 +82,7 @@ jobs: - run: pnpm build - run: pnpm test:ci - run: sh ./scripts/run_tests.sh unzip + - run: pnpm lint - name: 'Run ${{ matrix.template }} ${{ matrix.test }}' run: sh ./scripts/run_tests.sh ${{ matrix.test }} ${{ matrix.template }} @@ -108,5 +110,4 @@ jobs: - run: pip install -Uq pip wheel && bash scripts/run_code_style.sh install - run: npm install -g pnpm - run: pnpm i --frozen-lockfile --color - - run: pnpm lint - - run: python scripts/check_copies.py + - run: pnpm min_lint diff --git a/.prettierignore b/.prettierignore index dee70d2f..c6a18781 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,2 +1,3 @@ dist pnpm-lock.yaml +**/__DEV_CONFIG__.json \ No newline at end of file diff --git a/package.json b/package.json index 6ee1171c..ed45b773 100644 --- a/package.json +++ b/package.json @@ -2,13 +2,14 @@ "name": "@pytorch-ignite/code-generator", "version": "0.3.0", "scripts": { - "dev": "vite", + "dev": "vite --port 5000", "build": "vite build", "serve": "vite preview", - "test": "jest --color --runInBand", + "test": "rm -rf ./dist-tests && jest --color --runInBand", "test:ci": "start-server-and-test --expect 200 serve http://127.0.0.1:5000 test", "release": "node scripts/release.js", "fmt": "prettier --write . && bash scripts/run_code_style.sh fmt", + "min_lint": "prettier --check . && bash scripts/run_code_style.sh min_lint", "lint": "prettier --check . && bash scripts/run_code_style.sh lint" }, "dependencies": { diff --git a/scripts/check_copies.py b/scripts/check_copies.py deleted file mode 100644 index a3676e53..00000000 --- a/scripts/check_copies.py +++ /dev/null @@ -1,42 +0,0 @@ -# check copies of utils.py up-to-date or not - -from pathlib import Path - - -def check(fname): - red = "\033[31m" - green = "\033[32m" - reset = "\033[0m" - - with open(f"./src/templates/template-common/{fname}", "r") as f: - common = f.readlines() - - path = Path("./src/templates/") - - for file in path.rglob(f"**/{fname}"): - if str(file).find("common") > -1: - continue - else: - template = file.read_text("utf-8") - - match = [] - for c in common: - match.append(template.find(c) > -1) - - if all(match): - print(green, "Matched", file, reset) - else: - print(red, "Unmatched", file, reset) - exit(1) - - -if __name__ == "__main__": - check("config.yaml") - print() - check("main.py") - print() - check("README.md") - print() - check("requirements.txt") - print() - check("utils.py") diff --git a/scripts/run_code_style.sh b/scripts/run_code_style.sh index 6a074b36..be1e2742 100755 --- a/scripts/run_code_style.sh +++ b/scripts/run_code_style.sh @@ -3,8 +3,12 @@ set -xeu if [ $1 == "lint" ]; then + # Check that ./dist-tests/ exists and code is unzipped + ls ./dist-tests/vision-classification-all/main.py + ufmt diff . + flake8 --select F401,F821 ./dist-tests # find unused imports and non imported objects +elif [ $1 == "min_lint" ]; then ufmt diff . - flake8 --select F401 . # find unused imports elif [ $1 == "fmt" ]; then ufmt format .
elif [ $1 == "install" ]; then diff --git a/src/store.js b/src/store.js index b8197a35..4e1f8c3b 100644 --- a/src/store.js +++ b/src/store.js @@ -59,6 +59,15 @@ export function saveConfig(key, value) { } } +// merges the code from the common and specific files using ejs +function mergeCode(specificFileText, commonFileText) { + const replaced = specificFileText.replace( + /#::= from_template_common ::#/g, + commonFileText + ) + return replaced +} + // render the code if there are fetched files for current selected template export function genCode() { const currentFiles = files[store.config.template] @@ -78,6 +87,7 @@ export function genCode() { ) // trim ` #` .replace(/\s{4}#$/gim, '') + .replace(/ # usort: skip/g, '') } if (isDev) { store.code[__DEV_CONFIG_FILE__] = @@ -98,7 +108,14 @@ export async function fetchTemplates(template) { files[template] = {} for (const filename of templates[template]) { const response = await fetch(`${url}/${template}/${filename}`) - files[template][filename] = await response.text() + const text_specific = await response.text() + // Dynamically fetch the common templates-code, if the file exists in common, + // then render the replace_here code tag using ejs template + // If the file doesn't exist in common, then it will fetch an empty string + // then the code tag is replaced with empty string + const res_common = await fetch(`${url}/template-common/${filename}`) + const text_common = await res_common.text() + files[template][filename] = mergeCode(text_specific, text_common) } // calling genCode explicitly here diff --git a/src/templates/template-common/main.py b/src/templates/template-common/main.py index 07e91314..49e6063b 100644 --- a/src/templates/template-common/main.py +++ b/src/templates/template-common/main.py @@ -1,26 +1,3 @@ -ckpt_handler_train, ckpt_handler_eval = setup_handlers( - trainer, evaluator, config, to_save_train, to_save_eval -) - -#::: if (it.logger) { :::# -if rank == 0: - exp_logger.close() -#::: } :::# - -#::: if (it.save_training || it.save_evaluation) { :::# -# show last checkpoint names -logger.info( - "Last training checkpoint name - %s", - ckpt_handler_train.last_checkpoint, -) - -logger.info( - "Last evaluation checkpoint name - %s", - ckpt_handler_eval.last_checkpoint, -) -#::: } :::# - - # main entrypoint def main(): config = setup_config() @@ -42,3 +19,7 @@ def main(): with idist.Parallel(config.backend) as p: p.run(run, config=config) #::: } :::# + + +if __name__ == "__main__": + main() diff --git a/src/templates/template-common/utils.py b/src/templates/template-common/utils.py index 137a6199..43432b69 100644 --- a/src/templates/template-common/utils.py +++ b/src/templates/template-common/utils.py @@ -10,11 +10,34 @@ import yaml from ignite.contrib.engines import common from ignite.engine import Engine + +#::: if (it.save_training || it.save_evaluation || it.patience || it.terminate_on_nan || it.limit_sec) { :::# from ignite.engine.events import Events -from ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine + +#::: } :::# +#::: if (it.save_training || it.save_evaluation) { :::# +from ignite.handlers import ( + Checkpoint, + DiskSaver, + global_step_from_engine, +) # usort: skip + +#::: } else { :::# +from ignite.handlers import Checkpoint + +#::: } :::# +#::: if (it.patience) { :::# from ignite.handlers.early_stopping import EarlyStopping + +#::: } :::# +#::: if (it.terminate_on_nan) { :::# from ignite.handlers.terminate_on_nan import TerminateOnNan + +#::: } :::# +#::: if (it.limit_sec) { :::# from 
ignite.handlers.time_limit import TimeLimit + +#::: } :::# from ignite.utils import setup_logger @@ -141,72 +164,6 @@ def setup_logging(config: Any) -> Logger: return logger -#::: if (it.save_training || it.save_evaluation || it.patience || it.terminate_on_nan || it.limit_sec) { :::# - - -def setup_handlers( - trainer: Engine, - evaluator: Engine, - config: Any, - to_save_train: Optional[dict] = None, - to_save_eval: Optional[dict] = None, -): - """Setup Ignite handlers.""" - - ckpt_handler_train = ckpt_handler_eval = None - #::: if (it.save_training || it.save_evaluation) { :::# - # checkpointing - saver = DiskSaver(config.output_dir / "checkpoints", require_empty=False) - #::: if (it.save_training) { :::# - ckpt_handler_train = Checkpoint( - to_save_train, - saver, - filename_prefix=config.filename_prefix, - n_saved=config.n_saved, - ) - trainer.add_event_handler( - Events.ITERATION_COMPLETED(every=config.save_every_iters), - ckpt_handler_train, - ) - #::: } :::# - #::: if (it.save_evaluation) { :::# - global_step_transform = None - if to_save_train.get("trainer", None) is not None: - global_step_transform = global_step_from_engine(to_save_train["trainer"]) - ckpt_handler_eval = Checkpoint( - to_save_eval, - saver, - filename_prefix="best", - n_saved=config.n_saved, - global_step_transform=global_step_transform, - ) - evaluator.add_event_handler(Events.EPOCH_COMPLETED(every=1), ckpt_handler_eval) - #::: } :::# - #::: } :::# - - #::: if (it.patience) { :::# - # early stopping - - es = EarlyStopping(config.patience, score_fn, trainer) - evaluator.add_event_handler(Events.EPOCH_COMPLETED, es) - #::: } :::# - - #::: if (it.terminate_on_nan) { :::# - # terminate on nan - trainer.add_event_handler(Events.ITERATION_COMPLETED, TerminateOnNan()) - #::: } :::# - - #::: if (it.limit_sec) { :::# - # time limit - trainer.add_event_handler(Events.ITERATION_COMPLETED, TimeLimit(config.limit_sec)) - #::: } :::# - #::: if (it.save_training || it.save_evaluation) { :::# - return ckpt_handler_train, ckpt_handler_eval - #::: } :::# - - -#::: } :::# - #::: if (it.logger) { :::# diff --git a/src/templates/template-text-classification/config.yaml b/src/templates/template-text-classification/config.yaml index 5b6899b0..dac8ae40 100644 --- a/src/templates/template-text-classification/config.yaml +++ b/src/templates/template-text-classification/config.yaml @@ -1,13 +1,4 @@ -seed: 666 -data_path: ./ -train_batch_size: 32 -eval_batch_size: 32 -num_workers: 4 -max_epochs: 20 -train_epoch_length: 1000 -eval_epoch_length: 1000 -use_amp: false -debug: false +#::= from_template_common ::# model: bert-base-uncased model_dir: /tmp/model tokenizer_dir: /tmp/tokenizer @@ -18,45 +9,3 @@ weight_decay: 0.01 num_warmup_epochs: 0 max_length: 256 lr: 0.00005 - -#::: if (it.dist === 'spawn') { :::# -# distributed spawn -nproc_per_node: #:::= it.nproc_per_node :::# -#::: if (it.nnodes) { :::# -# distributed multi node spawn -nnodes: #:::= it.nnodes :::# -#::: if (it.nnodes > 1) { :::# -node_rank: 0 -master_addr: #:::= it.master_addr :::# -master_port: #:::= it.master_port :::# -#::: } :::# -#::: } :::# -#::: } :::# - -#::: if (it.filename_prefix) { :::# -filename_prefix: #:::= it.filename_prefix :::# -#::: } :::# - -#::: if (it.n_saved) { :::# -n_saved: #:::= it.n_saved :::# -#::: } :::# - -#::: if (it.save_every_iters) { :::# -save_every_iters: #:::= it.save_every_iters :::# -#::: } :::# - -#::: if (it.patience) { :::# -patience: #:::= it.patience :::# -#::: } :::# - -#::: if (it.limit_sec) { :::# -limit_sec: #:::= it.limit_sec 
:::# -#::: } :::# - -#::: if (it.output_dir) { :::# -output_dir: #:::= it.output_dir :::# -#::: } :::# - -#::: if (it.log_every_iters) { :::# -log_every_iters: #:::= it.log_every_iters :::# -#::: } :::# diff --git a/src/templates/template-text-classification/main.py b/src/templates/template-text-classification/main.py index d6c3487a..2e2f36d5 100644 --- a/src/templates/template-text-classification/main.py +++ b/src/templates/template-text-classification/main.py @@ -173,28 +173,4 @@ def _(): #::: } :::# -# main entrypoint -def main(): - config = setup_config() - #::: if (it.dist === 'spawn') { :::# - #::: if (it.nproc_per_node && it.nnodes > 1 && it.master_addr && it.master_port) { :::# - kwargs = { - "nproc_per_node": config.nproc_per_node, - "nnodes": config.nnodes, - "node_rank": config.node_rank, - "master_addr": config.master_addr, - "master_port": config.master_port, - } - #::: } else if (it.nproc_per_node) { :::# - kwargs = {"nproc_per_node": config.nproc_per_node} - #::: } :::# - with idist.Parallel(config.backend, **kwargs) as p: - p.run(run, config=config) - #::: } else { :::# - with idist.Parallel(config.backend) as p: - p.run(run, config=config) - #::: } :::# - - -if __name__ == "__main__": - main() +#::= from_template_common ::# diff --git a/src/templates/template-text-classification/utils.py b/src/templates/template-text-classification/utils.py index 9a2b79dd..d87c8586 100644 --- a/src/templates/template-text-classification/utils.py +++ b/src/templates/template-text-classification/utils.py @@ -1,145 +1,4 @@ -import logging -from argparse import ArgumentParser -from datetime import datetime -from logging import Logger -from pathlib import Path -from typing import Any, Mapping, Optional, Union - -import ignite.distributed as idist -import torch -import yaml -from ignite.contrib.engines import common -from ignite.engine import Engine -from ignite.engine.events import Events -from ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine -from ignite.handlers.early_stopping import EarlyStopping -from ignite.handlers.terminate_on_nan import TerminateOnNan -from ignite.handlers.time_limit import TimeLimit -from ignite.utils import setup_logger - - -def get_default_parser(): - parser = ArgumentParser() - parser.add_argument("config", type=Path, help="Config file path") - parser.add_argument( - "--backend", - default=None, - choices=["nccl", "gloo"], - type=str, - help="DDP backend", - ) - return parser - - -def setup_config(parser=None): - if parser is None: - parser = get_default_parser() - - args = parser.parse_args() - config_path = args.config - - with open(config_path, "r") as f: - config = yaml.safe_load(f.read()) - - for k, v in config.items(): - setattr(args, k, v) - - return args - - -def log_metrics(engine: Engine, tag: str) -> None: - """Log `engine.state.metrics` with given `engine` and `tag`. - - Parameters - ---------- - engine - instance of `Engine` which metrics to log. - tag - a string to add at the start of output. - """ - metrics_format = "{0} [{1}/{2}]: {3}".format( - tag, engine.state.epoch, engine.state.iteration, engine.state.metrics - ) - engine.logger.info(metrics_format) - - -def resume_from( - to_load: Mapping, - checkpoint_fp: Union[str, Path], - logger: Logger, - strict: bool = True, - model_dir: Optional[str] = None, -) -> None: - """Loads state dict from a checkpoint file to resume the training. - - Parameters - ---------- - to_load - a dictionary with objects, e.g. 
{“model”: model, “optimizer”: optimizer, ...} - checkpoint_fp - path to the checkpoint file - logger - to log info about resuming from a checkpoint - strict - whether to strictly enforce that the keys in `state_dict` match the keys - returned by this module’s `state_dict()` function. Default: True - model_dir - directory in which to save the object - """ - if isinstance(checkpoint_fp, str) and checkpoint_fp.startswith("https://"): - checkpoint = torch.hub.load_state_dict_from_url( - checkpoint_fp, - model_dir=model_dir, - map_location="cpu", - check_hash=True, - ) - else: - if isinstance(checkpoint_fp, str): - checkpoint_fp = Path(checkpoint_fp) - - if not checkpoint_fp.exists(): - raise FileNotFoundError(f"Given {str(checkpoint_fp)} does not exist.") - checkpoint = torch.load(checkpoint_fp, map_location="cpu") - - Checkpoint.load_objects(to_load=to_load, checkpoint=checkpoint, strict=strict) - logger.info("Successfully resumed from a checkpoint: %s", checkpoint_fp) - - -def setup_output_dir(config: Any, rank: int) -> Path: - """Create output folder.""" - if rank == 0: - now = datetime.now().strftime("%Y%m%d-%H%M%S") - name = f"{now}-backend-{config.backend}-lr-{config.lr}" - path = Path(config.output_dir, name) - path.mkdir(parents=True, exist_ok=True) - config.output_dir = path.as_posix() - - return Path(idist.broadcast(config.output_dir, src=0)) - - -def setup_logging(config: Any) -> Logger: - """Setup logger with `ignite.utils.setup_logger()`. - - Parameters - ---------- - config - config object. config has to contain `verbose` and `output_dir` attribute. - - Returns - ------- - logger - an instance of `Logger` - """ - green = "\033[32m" - reset = "\033[0m" - logger = setup_logger( - name=f"{green}[ignite]{reset}", - level=logging.DEBUG if config.debug else logging.INFO, - format="%(name)s: %(message)s", - filepath=config.output_dir / "training-info.log", - ) - return logger - +#::= from_template_common ::# #::: if (it.save_training || it.save_evaluation || it.patience || it.terminate_on_nan || it.limit_sec) { :::# @@ -209,50 +68,6 @@ def score_fn(engine: Engine): #::: } :::# -#::: } :::# - -#::: if (it.logger) { :::# - - -def setup_exp_logging(config, trainer, optimizers, evaluators): - """Setup Experiment Tracking logger from Ignite.""" - - #::: if (it.logger === 'clearml') { :::# - logger = common.setup_clearml_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'mlflow') { :::# - logger = common.setup_mlflow_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'neptune') { :::# - logger = common.setup_neptune_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'polyaxon') { :::# - logger = common.setup_plx_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'tensorboard') { :::# - logger = common.setup_tb_logging( - config.output_dir, - trainer, - optimizers, - evaluators, - config.log_every_iters, - ) - #::: } else if (it.logger === 'visdom') { :::# - logger = common.setup_visdom_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'wandb') { :::# - logger = common.setup_wandb_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } :::# - return logger - - #::: } :::# diff --git a/src/templates/template-vision-classification/config.yaml b/src/templates/template-vision-classification/config.yaml index 
5be3e903..5918a3af 100644 --- a/src/templates/template-vision-classification/config.yaml +++ b/src/templates/template-vision-classification/config.yaml @@ -1,54 +1,3 @@ -seed: 666 -data_path: ./ -train_batch_size: 32 -eval_batch_size: 32 -num_workers: 4 -max_epochs: 20 -train_epoch_length: 1000 -eval_epoch_length: 1000 +#::= from_template_common ::# lr: 0.0001 -use_amp: false -debug: false model: resnet18 - -#::: if (it.dist === 'spawn') { :::# -# distributed spawn -nproc_per_node: #:::= it.nproc_per_node :::# -#::: if (it.nnodes) { :::# -# distributed multi node spawn -nnodes: #:::= it.nnodes :::# -#::: if (it.nnodes > 1) { :::# -node_rank: 0 -master_addr: #:::= it.master_addr :::# -master_port: #:::= it.master_port :::# -#::: } :::# -#::: } :::# -#::: } :::# - -#::: if (it.filename_prefix) { :::# -filename_prefix: #:::= it.filename_prefix :::# -#::: } :::# - -#::: if (it.n_saved) { :::# -n_saved: #:::= it.n_saved :::# -#::: } :::# - -#::: if (it.save_every_iters) { :::# -save_every_iters: #:::= it.save_every_iters :::# -#::: } :::# - -#::: if (it.patience) { :::# -patience: #:::= it.patience :::# -#::: } :::# - -#::: if (it.limit_sec) { :::# -limit_sec: #:::= it.limit_sec :::# -#::: } :::# - -#::: if (it.output_dir) { :::# -output_dir: #:::= it.output_dir :::# -#::: } :::# - -#::: if (it.log_every_iters) { :::# -log_every_iters: #:::= it.log_every_iters :::# -#::: } :::# diff --git a/src/templates/template-vision-classification/main.py b/src/templates/template-vision-classification/main.py index 9f3e0033..9fda98ad 100644 --- a/src/templates/template-vision-classification/main.py +++ b/src/templates/template-vision-classification/main.py @@ -129,28 +129,4 @@ def _(): #::: } :::# -# main entrypoint -def main(): - config = setup_config() - #::: if (it.dist === 'spawn') { :::# - #::: if (it.nproc_per_node && it.nnodes > 1 && it.master_addr && it.master_port) { :::# - kwargs = { - "nproc_per_node": config.nproc_per_node, - "nnodes": config.nnodes, - "node_rank": config.node_rank, - "master_addr": config.master_addr, - "master_port": config.master_port, - } - #::: } else if (it.nproc_per_node) { :::# - kwargs = {"nproc_per_node": config.nproc_per_node} - #::: } :::# - with idist.Parallel(config.backend, **kwargs) as p: - p.run(run, config=config) - #::: } else { :::# - with idist.Parallel(config.backend) as p: - p.run(run, config=config) - #::: } :::# - - -if __name__ == "__main__": - main() +#::= from_template_common ::# diff --git a/src/templates/template-vision-classification/utils.py b/src/templates/template-vision-classification/utils.py index e1055bdb..e6812d5a 100644 --- a/src/templates/template-vision-classification/utils.py +++ b/src/templates/template-vision-classification/utils.py @@ -1,145 +1,4 @@ -import logging -from argparse import ArgumentParser -from datetime import datetime -from logging import Logger -from pathlib import Path -from typing import Any, Mapping, Optional, Union - -import ignite.distributed as idist -import torch -import yaml -from ignite.contrib.engines import common -from ignite.engine import Engine -from ignite.engine.events import Events -from ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine -from ignite.handlers.early_stopping import EarlyStopping -from ignite.handlers.terminate_on_nan import TerminateOnNan -from ignite.handlers.time_limit import TimeLimit -from ignite.utils import setup_logger - - -def get_default_parser(): - parser = ArgumentParser() - parser.add_argument("config", type=Path, help="Config file path") - 
parser.add_argument( - "--backend", - default=None, - choices=["nccl", "gloo"], - type=str, - help="DDP backend", - ) - return parser - - -def setup_config(parser=None): - if parser is None: - parser = get_default_parser() - - args = parser.parse_args() - config_path = args.config - - with open(config_path, "r") as f: - config = yaml.safe_load(f.read()) - - for k, v in config.items(): - setattr(args, k, v) - - return args - - -def log_metrics(engine: Engine, tag: str) -> None: - """Log `engine.state.metrics` with given `engine` and `tag`. - - Parameters - ---------- - engine - instance of `Engine` which metrics to log. - tag - a string to add at the start of output. - """ - metrics_format = "{0} [{1}/{2}]: {3}".format( - tag, engine.state.epoch, engine.state.iteration, engine.state.metrics - ) - engine.logger.info(metrics_format) - - -def resume_from( - to_load: Mapping, - checkpoint_fp: Union[str, Path], - logger: Logger, - strict: bool = True, - model_dir: Optional[str] = None, -) -> None: - """Loads state dict from a checkpoint file to resume the training. - - Parameters - ---------- - to_load - a dictionary with objects, e.g. {“model”: model, “optimizer”: optimizer, ...} - checkpoint_fp - path to the checkpoint file - logger - to log info about resuming from a checkpoint - strict - whether to strictly enforce that the keys in `state_dict` match the keys - returned by this module’s `state_dict()` function. Default: True - model_dir - directory in which to save the object - """ - if isinstance(checkpoint_fp, str) and checkpoint_fp.startswith("https://"): - checkpoint = torch.hub.load_state_dict_from_url( - checkpoint_fp, - model_dir=model_dir, - map_location="cpu", - check_hash=True, - ) - else: - if isinstance(checkpoint_fp, str): - checkpoint_fp = Path(checkpoint_fp) - - if not checkpoint_fp.exists(): - raise FileNotFoundError(f"Given {str(checkpoint_fp)} does not exist.") - checkpoint = torch.load(checkpoint_fp, map_location="cpu") - - Checkpoint.load_objects(to_load=to_load, checkpoint=checkpoint, strict=strict) - logger.info("Successfully resumed from a checkpoint: %s", checkpoint_fp) - - -def setup_output_dir(config: Any, rank: int) -> Path: - """Create output folder.""" - if rank == 0: - now = datetime.now().strftime("%Y%m%d-%H%M%S") - name = f"{now}-backend-{config.backend}-lr-{config.lr}" - path = Path(config.output_dir, name) - path.mkdir(parents=True, exist_ok=True) - config.output_dir = path.as_posix() - - return Path(idist.broadcast(config.output_dir, src=0)) - - -def setup_logging(config: Any) -> Logger: - """Setup logger with `ignite.utils.setup_logger()`. - - Parameters - ---------- - config - config object. config has to contain `verbose` and `output_dir` attribute. 
- - Returns - ------- - logger - an instance of `Logger` - """ - green = "\033[32m" - reset = "\033[0m" - logger = setup_logger( - name=f"{green}[ignite]{reset}", - level=logging.DEBUG if config.debug else logging.INFO, - format="%(name)s: %(message)s", - filepath=config.output_dir / "training-info.log", - ) - return logger - +#::= from_template_common ::# #::: if (it.save_training || it.save_evaluation || it.patience || it.terminate_on_nan || it.limit_sec) { :::# @@ -210,47 +69,3 @@ def score_fn(engine: Engine): #::: } :::# - -#::: if (it.logger) { :::# - - -def setup_exp_logging(config, trainer, optimizers, evaluators): - """Setup Experiment Tracking logger from Ignite.""" - - #::: if (it.logger === 'clearml') { :::# - logger = common.setup_clearml_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'mlflow') { :::# - logger = common.setup_mlflow_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'neptune') { :::# - logger = common.setup_neptune_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'polyaxon') { :::# - logger = common.setup_plx_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'tensorboard') { :::# - logger = common.setup_tb_logging( - config.output_dir, - trainer, - optimizers, - evaluators, - config.log_every_iters, - ) - #::: } else if (it.logger === 'visdom') { :::# - logger = common.setup_visdom_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'wandb') { :::# - logger = common.setup_wandb_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } :::# - return logger - - -#::: } :::# diff --git a/src/templates/template-vision-dcgan/config.yaml b/src/templates/template-vision-dcgan/config.yaml index f97dc602..c81c8b85 100644 --- a/src/templates/template-vision-dcgan/config.yaml +++ b/src/templates/template-vision-dcgan/config.yaml @@ -1,56 +1,5 @@ -seed: 666 -data_path: ./ -train_batch_size: 32 -eval_batch_size: 32 -num_workers: 4 -max_epochs: 20 -train_epoch_length: 1000 -eval_epoch_length: 1000 +#::= from_template_common ::# lr: 0.0001 -use_amp: false -debug: false z_dim: 100 d_filters: 64 g_filters: 64 - -#::: if (it.dist === 'spawn') { :::# -# distributed spawn -nproc_per_node: #:::= it.nproc_per_node :::# -#::: if (it.nnodes) { :::# -# distributed multi node spawn -nnodes: #:::= it.nnodes :::# -#::: if (it.nnodes > 1) { :::# -node_rank: 0 -master_addr: #:::= it.master_addr :::# -master_port: #:::= it.master_port :::# -#::: } :::# -#::: } :::# -#::: } :::# - -#::: if (it.filename_prefix) { :::# -filename_prefix: #:::= it.filename_prefix :::# -#::: } :::# - -#::: if (it.n_saved) { :::# -n_saved: #:::= it.n_saved :::# -#::: } :::# - -#::: if (it.save_every_iters) { :::# -save_every_iters: #:::= it.save_every_iters :::# -#::: } :::# - -#::: if (it.patience) { :::# -patience: #:::= it.patience :::# -#::: } :::# - -#::: if (it.limit_sec) { :::# -limit_sec: #:::= it.limit_sec :::# -#::: } :::# - -#::: if (it.output_dir) { :::# -output_dir: #:::= it.output_dir :::# -#::: } :::# - -#::: if (it.log_every_iters) { :::# -log_every_iters: #:::= it.log_every_iters :::# -#::: } :::# diff --git a/src/templates/template-vision-dcgan/main.py b/src/templates/template-vision-dcgan/main.py index 6ac8af8b..a2915be7 100644 --- a/src/templates/template-vision-dcgan/main.py +++ 
b/src/templates/template-vision-dcgan/main.py @@ -181,28 +181,4 @@ def _(): #::: } :::# -# main entrypoint -def main(): - config = setup_config() - #::: if (it.dist === 'spawn') { :::# - #::: if (it.nproc_per_node && it.nnodes > 1 && it.master_addr && it.master_port) { :::# - kwargs = { - "nproc_per_node": config.nproc_per_node, - "nnodes": config.nnodes, - "node_rank": config.node_rank, - "master_addr": config.master_addr, - "master_port": config.master_port, - } - #::: } else if (it.nproc_per_node) { :::# - kwargs = {"nproc_per_node": config.nproc_per_node} - #::: } :::# - with idist.Parallel(config.backend, **kwargs) as p: - p.run(run, config=config) - #::: } else { :::# - with idist.Parallel(config.backend) as p: - p.run(run, config=config) - #::: } :::# - - -if __name__ == "__main__": - main() +#::= from_template_common ::# diff --git a/src/templates/template-vision-dcgan/utils.py b/src/templates/template-vision-dcgan/utils.py index 00efb724..a75cb9bd 100644 --- a/src/templates/template-vision-dcgan/utils.py +++ b/src/templates/template-vision-dcgan/utils.py @@ -1,145 +1,4 @@ -import logging -from argparse import ArgumentParser -from datetime import datetime -from logging import Logger -from pathlib import Path -from typing import Any, Mapping, Optional, Union - -import ignite.distributed as idist -import torch -import yaml -from ignite.contrib.engines import common -from ignite.engine import Engine -from ignite.engine.events import Events -from ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine -from ignite.handlers.early_stopping import EarlyStopping -from ignite.handlers.terminate_on_nan import TerminateOnNan -from ignite.handlers.time_limit import TimeLimit -from ignite.utils import setup_logger - - -def get_default_parser(): - parser = ArgumentParser() - parser.add_argument("config", type=Path, help="Config file path") - parser.add_argument( - "--backend", - default=None, - choices=["nccl", "gloo"], - type=str, - help="DDP backend", - ) - return parser - - -def setup_config(parser=None): - if parser is None: - parser = get_default_parser() - - args = parser.parse_args() - config_path = args.config - - with open(config_path, "r") as f: - config = yaml.safe_load(f.read()) - - for k, v in config.items(): - setattr(args, k, v) - - return args - - -def log_metrics(engine: Engine, tag: str) -> None: - """Log `engine.state.metrics` with given `engine` and `tag`. - - Parameters - ---------- - engine - instance of `Engine` which metrics to log. - tag - a string to add at the start of output. - """ - metrics_format = "{0} [{1}/{2}]: {3}".format( - tag, engine.state.epoch, engine.state.iteration, engine.state.metrics - ) - engine.logger.info(metrics_format) - - -def resume_from( - to_load: Mapping, - checkpoint_fp: Union[str, Path], - logger: Logger, - strict: bool = True, - model_dir: Optional[str] = None, -) -> None: - """Loads state dict from a checkpoint file to resume the training. - - Parameters - ---------- - to_load - a dictionary with objects, e.g. {“model”: model, “optimizer”: optimizer, ...} - checkpoint_fp - path to the checkpoint file - logger - to log info about resuming from a checkpoint - strict - whether to strictly enforce that the keys in `state_dict` match the keys - returned by this module’s `state_dict()` function. 
Default: True - model_dir - directory in which to save the object - """ - if isinstance(checkpoint_fp, str) and checkpoint_fp.startswith("https://"): - checkpoint = torch.hub.load_state_dict_from_url( - checkpoint_fp, - model_dir=model_dir, - map_location="cpu", - check_hash=True, - ) - else: - if isinstance(checkpoint_fp, str): - checkpoint_fp = Path(checkpoint_fp) - - if not checkpoint_fp.exists(): - raise FileNotFoundError(f"Given {str(checkpoint_fp)} does not exist.") - checkpoint = torch.load(checkpoint_fp, map_location="cpu") - - Checkpoint.load_objects(to_load=to_load, checkpoint=checkpoint, strict=strict) - logger.info("Successfully resumed from a checkpoint: %s", checkpoint_fp) - - -def setup_output_dir(config: Any, rank: int) -> Path: - """Create output folder.""" - if rank == 0: - now = datetime.now().strftime("%Y%m%d-%H%M%S") - name = f"{now}-backend-{config.backend}-lr-{config.lr}" - path = Path(config.output_dir, name) - path.mkdir(parents=True, exist_ok=True) - config.output_dir = path.as_posix() - - return Path(idist.broadcast(config.output_dir, src=0)) - - -def setup_logging(config: Any) -> Logger: - """Setup logger with `ignite.utils.setup_logger()`. - - Parameters - ---------- - config - config object. config has to contain `verbose` and `output_dir` attribute. - - Returns - ------- - logger - an instance of `Logger` - """ - green = "\033[32m" - reset = "\033[0m" - logger = setup_logger( - name=f"{green}[ignite]{reset}", - level=logging.DEBUG if config.debug else logging.INFO, - format="%(name)s: %(message)s", - filepath=config.output_dir / "training-info.log", - ) - return logger - +#::= from_template_common ::# #::: if (it.save_training || it.save_evaluation || it.patience || it.terminate_on_nan || it.limit_sec) { :::# @@ -210,47 +69,3 @@ def score_fn(engine: Engine): #::: } :::# - -#::: if (it.logger) { :::# - - -def setup_exp_logging(config, trainer, optimizers, evaluators): - """Setup Experiment Tracking logger from Ignite.""" - - #::: if (it.logger === 'clearml') { :::# - logger = common.setup_clearml_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'mlflow') { :::# - logger = common.setup_mlflow_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'neptune') { :::# - logger = common.setup_neptune_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'polyaxon') { :::# - logger = common.setup_plx_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'tensorboard') { :::# - logger = common.setup_tb_logging( - config.output_dir, - trainer, - optimizers, - evaluators, - config.log_every_iters, - ) - #::: } else if (it.logger === 'visdom') { :::# - logger = common.setup_visdom_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'wandb') { :::# - logger = common.setup_wandb_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } :::# - return logger - - -#::: } :::# diff --git a/src/templates/template-vision-segmentation/config.yaml b/src/templates/template-vision-segmentation/config.yaml index b31fb31c..c56212b8 100644 --- a/src/templates/template-vision-segmentation/config.yaml +++ b/src/templates/template-vision-segmentation/config.yaml @@ -1,55 +1,4 @@ -seed: 666 -data_path: ./ -train_batch_size: 32 -eval_batch_size: 32 -num_workers: 4 -max_epochs: 20 -train_epoch_length: 1000 
-eval_epoch_length: 1000 +#::= from_template_common ::# lr: 0.007 -use_amp: false -debug: false accumulation_steps: 4 num_classes: 21 - -#::: if (it.dist === 'spawn') { :::# -# distributed spawn -nproc_per_node: #:::= it.nproc_per_node :::# -#::: if (it.nnodes) { :::# -# distributed multi node spawn -nnodes: #:::= it.nnodes :::# -#::: if (it.nnodes > 1) { :::# -node_rank: 0 -master_addr: #:::= it.master_addr :::# -master_port: #:::= it.master_port :::# -#::: } :::# -#::: } :::# -#::: } :::# - -#::: if (it.filename_prefix) { :::# -filename_prefix: #:::= it.filename_prefix :::# -#::: } :::# - -#::: if (it.n_saved) { :::# -n_saved: #:::= it.n_saved :::# -#::: } :::# - -#::: if (it.save_every_iters) { :::# -save_every_iters: #:::= it.save_every_iters :::# -#::: } :::# - -#::: if (it.patience) { :::# -patience: #:::= it.patience :::# -#::: } :::# - -#::: if (it.limit_sec) { :::# -limit_sec: #:::= it.limit_sec :::# -#::: } :::# - -#::: if (it.output_dir) { :::# -output_dir: #:::= it.output_dir :::# -#::: } :::# - -#::: if (it.log_every_iters) { :::# -log_every_iters: #:::= it.log_every_iters :::# -#::: } :::# diff --git a/src/templates/template-vision-segmentation/main.py b/src/templates/template-vision-segmentation/main.py index 0ef48744..4fe947bc 100644 --- a/src/templates/template-vision-segmentation/main.py +++ b/src/templates/template-vision-segmentation/main.py @@ -195,28 +195,4 @@ def _(): #::: } :::# -# main entrypoint -def main(): - config = setup_config() - #::: if (it.dist === 'spawn') { :::# - #::: if (it.nproc_per_node && it.nnodes > 1 && it.master_addr && it.master_port) { :::# - kwargs = { - "nproc_per_node": config.nproc_per_node, - "nnodes": config.nnodes, - "node_rank": config.node_rank, - "master_addr": config.master_addr, - "master_port": config.master_port, - } - #::: } else if (it.nproc_per_node) { :::# - kwargs = {"nproc_per_node": config.nproc_per_node} - #::: } :::# - with idist.Parallel(config.backend, **kwargs) as p: - p.run(run, config=config) - #::: } else { :::# - with idist.Parallel(config.backend) as p: - p.run(run, config=config) - #::: } :::# - - -if __name__ == "__main__": - main() +#::= from_template_common ::# diff --git a/src/templates/template-vision-segmentation/trainers.py b/src/templates/template-vision-segmentation/trainers.py index f2d2148c..05b2f914 100644 --- a/src/templates/template-vision-segmentation/trainers.py +++ b/src/templates/template-vision-segmentation/trainers.py @@ -19,7 +19,7 @@ def setup_trainer( loss_fn: Module, device: Union[str, torch.device], train_sampler: Sampler, -): +) -> Union[Engine, DeterministicEngine]: prepare_batch = prepare_image_mask scaler = GradScaler(enabled=config.use_amp) @@ -63,7 +63,7 @@ def setup_evaluator( model: Module, metrics: Dict[str, Metric], device: Union[str, torch.device], -): +) -> Engine: prepare_batch = prepare_image_mask @torch.no_grad() diff --git a/src/templates/template-vision-segmentation/utils.py b/src/templates/template-vision-segmentation/utils.py index 6df43d06..f82e886b 100644 --- a/src/templates/template-vision-segmentation/utils.py +++ b/src/templates/template-vision-segmentation/utils.py @@ -1,144 +1,4 @@ -import logging -from argparse import ArgumentParser -from datetime import datetime -from logging import Logger -from pathlib import Path -from typing import Any, Mapping, Optional, Union - -import ignite.distributed as idist -import torch -import yaml -from ignite.contrib.engines import common -from ignite.engine import Engine -from ignite.engine.events import Events -from 
ignite.handlers import Checkpoint, DiskSaver, global_step_from_engine -from ignite.handlers.early_stopping import EarlyStopping -from ignite.handlers.terminate_on_nan import TerminateOnNan -from ignite.handlers.time_limit import TimeLimit -from ignite.utils import setup_logger - - -def get_default_parser(): - parser = ArgumentParser() - parser.add_argument("config", type=Path, help="Config file path") - parser.add_argument( - "--backend", - default=None, - choices=["nccl", "gloo"], - type=str, - help="DDP backend", - ) - return parser - - -def setup_config(parser=None): - if parser is None: - parser = get_default_parser() - - args = parser.parse_args() - config_path = args.config - - with open(config_path, "r") as f: - config = yaml.safe_load(f.read()) - - for k, v in config.items(): - setattr(args, k, v) - - return args - - -def log_metrics(engine: Engine, tag: str) -> None: - """Log `engine.state.metrics` with given `engine` and `tag`. - - Parameters - ---------- - engine - instance of `Engine` which metrics to log. - tag - a string to add at the start of output. - """ - metrics_format = "{0} [{1}/{2}]: {3}".format( - tag, engine.state.epoch, engine.state.iteration, engine.state.metrics - ) - engine.logger.info(metrics_format) - - -def resume_from( - to_load: Mapping, - checkpoint_fp: Union[str, Path], - logger: Logger, - strict: bool = True, - model_dir: Optional[str] = None, -) -> None: - """Loads state dict from a checkpoint file to resume the training. - - Parameters - ---------- - to_load - a dictionary with objects, e.g. {“model”: model, “optimizer”: optimizer, ...} - checkpoint_fp - path to the checkpoint file - logger - to log info about resuming from a checkpoint - strict - whether to strictly enforce that the keys in `state_dict` match the keys - returned by this module’s `state_dict()` function. Default: True - model_dir - directory in which to save the object - """ - if isinstance(checkpoint_fp, str) and checkpoint_fp.startswith("https://"): - checkpoint = torch.hub.load_state_dict_from_url( - checkpoint_fp, - model_dir=model_dir, - map_location="cpu", - check_hash=True, - ) - else: - if isinstance(checkpoint_fp, str): - checkpoint_fp = Path(checkpoint_fp) - - if not checkpoint_fp.exists(): - raise FileNotFoundError(f"Given {str(checkpoint_fp)} does not exist.") - checkpoint = torch.load(checkpoint_fp, map_location="cpu") - - Checkpoint.load_objects(to_load=to_load, checkpoint=checkpoint, strict=strict) - logger.info("Successfully resumed from a checkpoint: %s", checkpoint_fp) - - -def setup_output_dir(config: Any, rank: int) -> Path: - """Create output folder.""" - if rank == 0: - now = datetime.now().strftime("%Y%m%d-%H%M%S") - name = f"{now}-backend-{config.backend}-lr-{config.lr}" - path = Path(config.output_dir, name) - path.mkdir(parents=True, exist_ok=True) - config.output_dir = path.as_posix() - - return Path(idist.broadcast(config.output_dir, src=0)) - - -def setup_logging(config: Any) -> Logger: - """Setup logger with `ignite.utils.setup_logger()`. - - Parameters - ---------- - config - config object. config has to contain `verbose` and `output_dir` attribute. 
- - Returns - ------- - logger - an instance of `Logger` - """ - green = "\033[32m" - reset = "\033[0m" - logger = setup_logger( - name=f"{green}[ignite]{reset}", - level=logging.DEBUG if config.debug else logging.INFO, - format="%(name)s: %(message)s", - filepath=config.output_dir / "training-info.log", - ) - return logger +#::= from_template_common ::# #::: if (it.save_training || it.save_evaluation || it.patience || it.terminate_on_nan || it.limit_sec) { :::# @@ -210,50 +70,6 @@ def score_fn(engine: Engine): #::: } :::# -#::: } :::# - -#::: if (it.logger) { :::# - - -def setup_exp_logging(config, trainer, optimizers, evaluators): - """Setup Experiment Tracking logger from Ignite.""" - - #::: if (it.logger === 'clearml') { :::# - logger = common.setup_clearml_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'mlflow') { :::# - logger = common.setup_mlflow_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'neptune') { :::# - logger = common.setup_neptune_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'polyaxon') { :::# - logger = common.setup_plx_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'tensorboard') { :::# - logger = common.setup_tb_logging( - config.output_dir, - trainer, - optimizers, - evaluators, - config.log_every_iters, - ) - #::: } else if (it.logger === 'visdom') { :::# - logger = common.setup_visdom_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } else if (it.logger === 'wandb') { :::# - logger = common.setup_wandb_logging( - trainer, optimizers, evaluators, config.log_every_iters - ) - #::: } :::# - return logger - - #::: } :::#