Skip to content

Commit

Permalink
Removed extra line before main fn (#272)
Browse files Browse the repository at this point in the history
* Removed extra line before main fn

* removed extra line in trainers.py

* - Refactored store.js into two files: store.js and codegen.js
- fixed code rendering issue with additional new lines
  • Loading branch information
vfdev-5 authored Jul 10, 2023
1 parent 46d3daa commit 9c65257
Show file tree
Hide file tree
Showing 15 changed files with 94 additions and 70 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ jobs:
- run: pnpm build
- run: pnpm test:ci
- run: sh ./scripts/run_tests.sh unzip
- run: pnpm lint
- run: pnpm dist_lint

- name: 'Run ${{ matrix.template }} ${{ matrix.test }}'
run: sh ./scripts/run_tests.sh ${{ matrix.test }} ${{ matrix.template }}
Expand Down Expand Up @@ -110,4 +110,4 @@ jobs:
- run: pip install -Uq pip wheel && bash scripts/run_code_style.sh install
- run: npm install -g pnpm
- run: pnpm i --frozen-lockfile --color
- run: pnpm min_lint
- run: pnpm source_lint
8 changes: 5 additions & 3 deletions CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,8 @@ To contribute to Code-Generator App, you will need Nodejs LTS v16, VSCode, Vetur

- Create a virtual environment for python.

- For docker users please see [this guide](docker/readme.md)

- With pip:

```sh
Expand Down Expand Up @@ -104,9 +106,9 @@ To add a new template,

```sh
# format
pnpm run fmt
pnpm fmt
# lint
pnpm run lint
pnpm source_lint
```

- To ensure the codebase complies with a style guide, we use black and ufmt to format and check the codebase for compliance with PEP8. Install and run with:
Expand All @@ -117,7 +119,7 @@ To add a new template,
# format the codes
bash scripts/run_code_style.sh fmt
# lint the codes
bash scripts/run_code_style.sh lint
bash scripts/run_code_style.sh source_lint
```

_NOTE: Even if you have a half-completed/working PR, sending a PR is still a valid contribution and we can help you finish the PR._
Expand Down
3 changes: 2 additions & 1 deletion docker/readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ pnpm dev
- Run ci tests locally

```bash
pnpm test:ci
pnpm dev &
pnpm test

sh ./scripts/run_tests.sh unzip
sh ./scripts/run_tests.sh simple vision-classification
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
"test:ci": "start-server-and-test --expect 200 serve http://127.0.0.1:5000 test",
"release": "node scripts/release.js",
"fmt": "prettier --write . && bash scripts/run_code_style.sh fmt",
"min_lint": "prettier --check . && bash scripts/run_code_style.sh min_lint",
"lint": "prettier --check . && bash scripts/run_code_style.sh lint"
"source_lint": "prettier --check . && bash scripts/run_code_style.sh source_lint",
"dist_lint": "prettier --check . && bash scripts/run_code_style.sh dist_lint"
},
"dependencies": {
"@iconify/iconify": "^3.1.0",
Expand Down
8 changes: 6 additions & 2 deletions scripts/run_code_style.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,16 @@

set -xeu

if [ $1 == "lint" ]; then
if [ $1 == "dist_lint" ]; then
# Check that ./dist-tests/ exists and code is unzipped
ls ./dist-tests/vision-classification-all/main.py
# Comment dist-tests in .gitignore to make black running on ./dist-tests folder
# TODO:
ufmt diff .
flake8 --select F401,F821 ./dist-tests # find unused imports and non imported objects
elif [ $1 == "min_lint" ]; then
# Restore .gitignore
# TODO:
elif [ $1 == "source_lint" ]; then
ufmt diff .
elif [ $1 == "fmt" ]; then
ufmt format .
Expand Down
40 changes: 40 additions & 0 deletions src/codegen.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
// @ts-check
import ejs from 'ejs'

// Inline the shared template code: every `#::= from_template_common ::#`
// placeholder line in the template-specific text is substituted with the
// full contents of the common file (ejs include-style merge).
export function mergeCode(specificFileText, commonFileText) {
  const placeholder = /#::= from_template_common ::#\n/g
  return specificFileText.replace(placeholder, commonFileText)
}

// Render a single template file with ejs.
//
// Before rendering, any run of whitespace/newlines that precedes an
// `#::: ` opening tag is collapsed so the tag sits flush against the
// preceding code — this keeps extra blank lines from leaking into the
// generated output. After rendering, ` # usort: skip` markers are
// stripped from the result.
export function renderCode(code, config) {
  // replace `\s(s) or \n(s)#:::\s` with `#::: `
  code = code.replace(/([\s\n]+#:::\s)/gi, '#::: ')

  return ejs.render(code, config).replace(/ # usort: skip/g, '')
}

// Render every fetched file of the currently selected template into
// `store.code`. The generated test file is skipped (and removed from any
// previous render) unless the user opted into including tests.
export function generateFiles(currentFiles, store) {
  for (const [file, text] of Object.entries(currentFiles)) {
    const dropTestFile = !store.config.include_test && file === 'test_all.py'
    if (dropTestFile) {
      delete store.code['test_all.py']
    } else {
      store.code[file] = renderCode(text, store.config)
    }
  }
}

// ejs options: template tags are written as `#::: ... :::#` (matching the
// patterns handled in renderCode/mergeCode above) and the config object is
// exposed inside templates under the name `it`.
ejs.localsName = 'it'
ejs.delimiter = ':::'
ejs.openDelimiter = '#'
ejs.closeDelimiter = '#'
34 changes: 2 additions & 32 deletions src/store.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
// @ts-check
// central store for user input configs and generated codes
import { reactive, watch } from 'vue'
import ejs from 'ejs'

import templates from './templates/templates.json'
import { generateFiles, mergeCode } from './codegen'

// get env variables for template fetching
// @ts-ignore
Expand Down Expand Up @@ -59,36 +59,12 @@ export function saveConfig(key, value) {
}
}

// merges the code from the common and specific files using ejs
function mergeCode(specificFileText, commonFileText) {
const replaced = specificFileText.replace(
/#::= from_template_common ::#\n/g,
commonFileText
)
return replaced
}

// render the code if there are fetched files for current selected template
export function genCode() {
const currentFiles = files[store.config.template]
store.code = {} // empty the `store.code` after changing templates
if (currentFiles && Object.keys(currentFiles).length) {
for (const file in currentFiles) {
if (!store.config.include_test && file === 'test_all.py') {
delete store.code['test_all.py']
continue
}
store.code[file] = ejs
.render(
// replace `\s(s) or \n(s)#:::\s`
// with `#::: `
currentFiles[file].replace(/([\s\n]+#:::\s)/gi, '#::: '),
store.config
)
// trim ` #`
.replace(/\s{4}#$/gim, '')
.replace(/ # usort: skip/g, '')
}
generateFiles(currentFiles, store)
if (isDev) {
store.code[__DEV_CONFIG_FILE__] =
'# THIS FILE APPEARS ONLY IN DEV MODE\n' +
Expand Down Expand Up @@ -130,9 +106,3 @@ export async function fetchTemplates(template) {
// if that changed, call the genCode function
// same as watch(() => store.config, () => genCode(), { deep: true })
watch(store.config, () => genCode())

// ejs options
ejs.localsName = 'it'
ejs.delimiter = ':::'
ejs.openDelimiter = '#'
ejs.closeDelimiter = '#'
27 changes: 9 additions & 18 deletions src/templates/template-text-classification/main.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,19 @@
import os
from pprint import pformat
from shutil import copy
from typing import Any, cast
from typing import Any

import ignite.distributed as idist
from data import setup_data
from ignite.engine import Events
from ignite.handlers import LRScheduler, PiecewiseLinear
from ignite.handlers import PiecewiseLinear
from ignite.metrics import Accuracy, Loss
from ignite.utils import manual_seed
from models import TransformerModel
from torch import nn, optim
from trainers import setup_evaluator, setup_trainer
from utils import *

try:
from torch.optim.lr_scheduler import _LRScheduler as PyTorchLRScheduler
except ImportError:
from torch.optim.lr_scheduler import LRScheduler as PyTorchLRScheduler

os.environ["TOKENIZERS_PARALLELISM"] = "false" # remove tokenizer paralleism warning


Expand Down Expand Up @@ -83,18 +78,11 @@ def run(local_rank: int, config: Any):
logger.info("Configuration: \n%s", pformat(vars(config)))
trainer.logger = evaluator.logger = logger

if isinstance(lr_scheduler, PyTorchLRScheduler):
trainer.add_event_handler(
Events.ITERATION_COMPLETED,
lambda engine: cast(PyTorchLRScheduler, lr_scheduler).step(),
)
elif isinstance(lr_scheduler, LRScheduler):
trainer.add_event_handler(Events.ITERATION_COMPLETED, lr_scheduler)
else:
trainer.add_event_handler(Events.ITERATION_STARTED, lr_scheduler)
trainer.add_event_handler(Events.ITERATION_COMPLETED, lr_scheduler)

# setup ignite handlers
#::: if (it.save_training || it.save_evaluation) { :::#

# setup ignite handlers
#::: if (it.save_training) { :::#
to_save_train = {
"model": model,
Expand All @@ -118,6 +106,7 @@ def run(local_rank: int, config: Any):
#::: } :::#

#::: if (it.logger) { :::#

# experiment tracking
if rank == 0:
exp_logger = setup_exp_logging(config, trainer, optimizer, evaluator)
Expand Down Expand Up @@ -155,12 +144,14 @@ def _():
)

#::: if (it.logger) { :::#

# close logger
if rank == 0:
exp_logger.close()
#::: } :::#
#

#::: if (it.save_training || it.save_evaluation) { :::#

# show last checkpoint names
logger.info(
"Last training checkpoint name - %s",
Expand Down
3 changes: 2 additions & 1 deletion src/templates/template-text-classification/trainers.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,10 +47,11 @@ def train_function(engine: Union[Engine, DeterministicEngine], batch: Any):
engine.state.metrics = metric
return metric

#
#::: if(it.deterministic) { :::#

trainer = DeterministicEngine(train_function)
#::: } else { :::#

trainer = Engine(train_function)
#::: } :::#

Expand Down
8 changes: 6 additions & 2 deletions src/templates/template-vision-classification/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,9 @@ def run(local_rank: int, config: Any):

trainer.add_event_handler(Events.ITERATION_COMPLETED, lr_scheduler)

# setup ignite handlers
#::: if (it.save_training || it.save_evaluation) { :::#

# setup ignite handlers
#::: if (it.save_training) { :::#
to_save_train = {
"model": model,
Expand All @@ -91,6 +92,7 @@ def run(local_rank: int, config: Any):
#::: } :::#

#::: if (it.logger) { :::#

# experiment tracking
if rank == 0:
exp_logger = setup_exp_logging(config, trainer, optimizer, evaluator)
Expand Down Expand Up @@ -128,12 +130,14 @@ def _():
)

#::: if (it.logger) { :::#

# close logger
if rank == 0:
exp_logger.close()
#::: } :::#
#

#::: if (it.save_training || it.save_evaluation) { :::#

# show last checkpoint names
logger.info(
"Last training checkpoint name - %s",
Expand Down
3 changes: 2 additions & 1 deletion src/templates/template-vision-classification/trainers.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,11 @@ def train_function(engine: Union[Engine, DeterministicEngine], batch: Any):
}
return {"train_loss": train_loss}

#
#::: if(it.deterministic) { :::#

trainer = DeterministicEngine(train_function)
#::: } else { :::#

trainer = Engine(train_function)
#::: } :::#

Expand Down
8 changes: 6 additions & 2 deletions src/templates/template-vision-dcgan/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -81,8 +81,9 @@ def run(local_rank: int, config: Any):
logger.info("Configuration: \n%s", pformat(vars(config)))
trainer.logger = evaluator.logger = logger

# setup ignite handlers
#::: if (it.save_training || it.save_evaluation) { :::#

# setup ignite handlers
#::: if (it.save_training) { :::#
to_save_train = {
"model_d": model_d,
Expand All @@ -107,6 +108,7 @@ def run(local_rank: int, config: Any):
#::: } :::#

#::: if (it.logger) { :::#

# experiment tracking
if rank == 0:
exp_logger = setup_exp_logging(
Expand Down Expand Up @@ -163,12 +165,14 @@ def _():
)

#::: if (it.logger) { :::#

# close logger
if rank == 0:
exp_logger.close()
#::: } :::#
#

#::: if (it.save_training || it.save_evaluation) { :::#

# show last checkpoint names
logger.info(
"Last training checkpoint name - %s",
Expand Down
3 changes: 2 additions & 1 deletion src/templates/template-vision-dcgan/trainers.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,10 +87,11 @@ def train_function(engine: Union[Engine, DeterministicEngine], batch: Any):

return metrics

#
#::: if(it.deterministic) { :::#

trainer = DeterministicEngine(train_function)
#::: } else { :::#

trainer = Engine(train_function)
#::: } :::#

Expand Down
Loading

0 comments on commit 9c65257

Please sign in to comment.