# Command-line flags for the threaded CSV import script.
# Dest names, flags, defaults and actions are unchanged; only the
# user-facing help texts were fixed (grammar/typos).
parser.add_argument('--size', dest='batch_size', default=10,
                    help='Number of lines to import per connection')
parser.add_argument('--skip', dest='skip', default=0,
                    help='Skip until line [SKIP]')
parser.add_argument('--fail', action='store_true', dest="fail",
                    help='Fail mode')
parser.add_argument('-s', '--sep', dest="separator", default=";",
                    help='CSV separator')
parser.add_argument('--groupby', dest='split',
                    help='Group data per batch with the same value for the given '
                         'column in order to avoid concurrent update errors')
parser.add_argument('--ignore', dest='ignore',
                    help='List of columns separated by commas. Those columns will '
                         'be removed from the import request')
parser.add_argument('--check', dest='check', action='store_true',
                    help='Check if records are imported after each batch.')
|
56 | 56 | split = False
|
57 | 57 |
|
58 | 58 | import_threaded.import_data(args.config, args.model, file_csv=file_csv, context=context,
|
59 |
| - fail_file=fail_file, encoding=encoding, separator=args.seprator, |
| 59 | + fail_file=fail_file, encoding=encoding, separator=args.separator, |
60 | 60 | ignore=ignore, split=args.split, check=args.check,
|
61 | 61 | max_connection=max_connection, batch_size=batch_size, skip=int(args.skip))
|
62 |
| - |
0 commit comments