
[Jobs] Add --python-named-params to databricks jobs run-now (#425)
* Add python_named_parameters to run_now_cli in jobs cli

* revert styling changes

* Update databricks_cli/jobs/api.py
nordp authored Feb 3, 2022
1 parent 0296117 commit 7a3a18b
Showing 5 changed files with 20 additions and 9 deletions.
7 changes: 4 additions & 3 deletions databricks_cli/jobs/api.py
@@ -49,10 +49,11 @@ def reset_job(self, json, headers=None, version=None):
         return self.client.client.perform_query('POST', '/jobs/reset', data=json, headers=headers,
                                                 version=version)
 
-    def run_now(self, job_id, jar_params, notebook_params, python_params, spark_submit_params,
-                headers=None, version=None):
+    def run_now(self, job_id, jar_params, notebook_params, python_params, python_named_params,
+                spark_submit_params, headers=None, version=None):
         return self.client.run_now(job_id, jar_params, notebook_params, python_params,
-                                   spark_submit_params, headers=headers, version=version)
+                                   python_named_params, spark_submit_params, headers=headers,
+                                   version=version)
 
     def _list_jobs_by_name(self, name, headers=None):
         jobs = self.list_jobs(headers=headers)['jobs']
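For orientation, a minimal sketch of calling the extended JobsApi.run_now from Python; the workspace URL, token, and job ID are placeholders, and the parameter values are only illustrative:

from databricks_cli.sdk import ApiClient
from databricks_cli.jobs.api import JobsApi

# Placeholder credentials; point these at a real workspace and token.
client = ApiClient(host='https://<workspace-url>', token='<personal-access-token>')
jobs_api = JobsApi(client)

# The new python_named_params argument takes a map of key-value pairs and is
# forwarded to the /jobs/run-now request alongside the other parameter types.
run = jobs_api.run_now(
    job_id=1234,
    jar_params=None,
    notebook_params=None,
    python_params=None,
    python_named_params={'name': 'john doe', 'age': 35},
    spark_submit_params=None,
)
print(run)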
10 changes: 7 additions & 3 deletions databricks_cli/jobs/cli.py
@@ -207,6 +207,9 @@ def get_cli(api_client, job_id, version):
                    'i.e. {"name": "john doe", "age": 35}')
 @click.option('--python-params', default=None, type=JsonClickType(),
               help='JSON string specifying an array of parameters. i.e. ["param1", "param2"]')
+@click.option('--python-named-params', default=None, type=JsonClickType(),
+              help='JSON string specifying a map of key-value pairs. '
+                   'i.e. {"name": "john doe", "age": 35}')
 @click.option('--spark-submit-params', default=None, type=JsonClickType(),
               help='JSON string specifying an array of parameters. i.e. '
                    '["--class", "org.apache.spark.examples.SparkPi"]')
@@ -217,7 +220,7 @@ def get_cli(api_client, job_id, version):
 @eat_exceptions
 @provide_api_client
 def run_now_cli(api_client, job_id, jar_params, notebook_params, python_params,
-                spark_submit_params, version):
+                python_named_params, spark_submit_params, version):
     """
     Runs a job with optional per-run parameters.
@@ -228,10 +231,11 @@ def run_now_cli(api_client, job_id, jar_params, notebook_params, python_params,
     jar_params_json = json_loads(jar_params) if jar_params else None
     notebook_params_json = json_loads(notebook_params) if notebook_params else None
     python_params = json_loads(python_params) if python_params else None
+    python_named_params = json_loads(python_named_params) if python_named_params else None
     spark_submit_params = json_loads(spark_submit_params) if spark_submit_params else None
     res = JobsApi(api_client).run_now(
-        job_id, jar_params_json, notebook_params_json, python_params, spark_submit_params,
-        version=version)
+        job_id, jar_params_json, notebook_params_json, python_params,
+        python_named_params, spark_submit_params, version=version)
     click.echo(pretty_format(res))
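On the command line, the new option takes a JSON map, mirroring the help text above; the job ID and values in this sketch are illustrative:

databricks jobs run-now --job-id 246 --python-named-params '{"name": "john doe", "age": 35}'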
4 changes: 3 additions & 1 deletion databricks_cli/sdk/service.py
@@ -156,7 +156,7 @@ def list_jobs(self, job_type=None, expand_tasks=None, limit=None, offset=None, h
 
         return self.client.perform_query('GET', '/jobs/list', data=_data, headers=headers, version=version)
 
-    def run_now(self, job_id=None, jar_params=None, notebook_params=None, python_params=None,
+    def run_now(self, job_id=None, jar_params=None, notebook_params=None, python_params=None, python_named_params=None,
                 spark_submit_params=None, headers=None, version=None):
         _data = {}
         if job_id is not None:
@@ -167,6 +167,8 @@ def run_now(self, job_id=None, jar_params=None, notebook_params=None, python_par
             _data['notebook_params'] = notebook_params
         if python_params is not None:
             _data['python_params'] = python_params
+        if python_named_params is not None:
+            _data['python_named_params'] = python_named_params
         if spark_submit_params is not None:
             _data['spark_submit_params'] = spark_submit_params
         return self.client.perform_query('POST', '/jobs/run-now', data=_data, headers=headers, version=version)
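Because JobsService.run_now only copies non-None arguments into the request body, a call that supplies just the job ID and the new field would post a payload like the following sketch (values are hypothetical, not an actual API trace):

# Body assembled by JobsService.run_now for
# run_now(job_id=246, python_named_params={'name': 'john doe', 'age': 35});
# it is sent as JSON to POST /jobs/run-now.
data = {
    'job_id': 246,
    'python_named_params': {'name': 'john doe', 'age': 35},
}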
4 changes: 2 additions & 2 deletions tests/jobs/test_api.py
@@ -131,13 +131,13 @@ def test_list_jobs():
 def test_run_now():
     with mock.patch('databricks_cli.sdk.ApiClient') as api_client_mock:
         api = JobsApi(api_client_mock)
-        api.run_now('1', ['bla'], None, None, None)
+        api.run_now('1', ['bla'], None, None, None, None)
         api_client_mock.perform_query.assert_called_with(
             'POST', '/jobs/run-now', data={'job_id': '1', 'jar_params': ['bla']},
             headers=None, version=None
         )
 
-        api.run_now('1', ['bla'], None, None, None, version='3.0')
+        api.run_now('1', ['bla'], None, None, None, None, version='3.0')
         api_client_mock.perform_query.assert_called_with(
             'POST', '/jobs/run-now', data={'job_id': '1', 'jar_params': ['bla']},
             headers=None, version='3.0'
4 changes: 4 additions & 0 deletions tests/jobs/test_cli.py
@@ -191,6 +191,7 @@ def test_list_jobs_type_pipeline(jobs_api_mock):
 NOTEBOOK_PARAMS = '{"a": 1}'
 JAR_PARAMS = '[1, 2, 3]'
 PYTHON_PARAMS = '["python", "params"]'
+PYTHON_NAMED_PARAMS = '{"python": "named", "params": 1}'
 SPARK_SUBMIT_PARAMS = '["--class", "org.apache.spark.examples.SparkPi"]'
 
 
@@ -217,6 +218,7 @@ def test_run_now_with_params(jobs_api_mock):
         '--jar-params', JAR_PARAMS,
         '--notebook-params', NOTEBOOK_PARAMS,
         '--python-params', PYTHON_PARAMS,
+        '--python-named-params', PYTHON_NAMED_PARAMS,
         '--spark-submit-params', SPARK_SUBMIT_PARAMS])
     assert jobs_api_mock.run_now.call_args[0][0] == 1
     assert jobs_api_mock.run_now.call_args[0][1] == json.loads(JAR_PARAMS)
@@ -225,6 +227,8 @@ def test_run_now_with_params(jobs_api_mock):
     assert jobs_api_mock.run_now.call_args[0][3] == json.loads(
         PYTHON_PARAMS)
     assert jobs_api_mock.run_now.call_args[0][4] == json.loads(
+        PYTHON_NAMED_PARAMS)
+    assert jobs_api_mock.run_now.call_args[0][5] == json.loads(
         SPARK_SUBMIT_PARAMS)
     assert echo_mock.call_args[0][0] == pretty_format(RUN_NOW_RETURN)
