Skip to content

Commit

Permalink
Merge pull request #522 from rtdip/develop
Browse files Browse the repository at this point in the history
v0.8.1
  • Loading branch information
GBBBAS authored Oct 4, 2023
2 parents 8729332 + c57d345 commit 63cfe38
Show file tree
Hide file tree
Showing 3 changed files with 56 additions and 12 deletions.
8 changes: 6 additions & 2 deletions src/sdk/python/rtdip_sdk/pipelines/deploy/databricks.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,9 @@ def deploy(self) -> Union[bool, ValueError]:
Deploys an RTDIP Pipeline Job to Databricks Workflows. The deployment is managed by the Job Name and therefore will overwrite any existing workflow in Databricks with the same name.
"""
# Add libraries to Databricks Job
workspace_client = WorkspaceClient(host=self.host, token=self.token)
workspace_client = WorkspaceClient(
host=self.host, token=self.token, auth_type="token"
)
for task in self.databricks_job.tasks:
if task.notebook_task is None and task.spark_python_task is None:
return ValueError(
Expand Down Expand Up @@ -260,7 +262,9 @@ def launch(self):
"""
Launches an RTDIP Pipeline Job in Databricks Workflows. This will perform the equivalent of a `Run Now` in Databricks Workflows
"""
workspace_client = WorkspaceClient(host=self.host, token=self.token)
workspace_client = WorkspaceClient(
host=self.host, token=self.token, auth_type="token"
)
job_found = False
for existing_job in workspace_client.jobs.list(name=self.databricks_job.name):
workspace_client.jobs.run_now(job_id=existing_job.job_id)
Expand Down
8 changes: 4 additions & 4 deletions src/sdk/python/rtdip_sdk/queries/query_builder.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,7 +328,7 @@ def time_weighted_average(

def metadata(
self,
tagname_filter: [str],
tagname_filter: [str] = None,
limit: int = None,
offset: int = None,
) -> DataFrame:
Expand All @@ -345,7 +345,7 @@ def metadata(
"""
metadata_parameters = {
"source": self.data_source,
"tag_names": tagname_filter,
"tag_names": [] if tagname_filter is None else tagname_filter,
"tagname_column": self.tagname_column,
"limit": limit,
"offset": offset,
Expand All @@ -355,7 +355,7 @@ def metadata(

def latest(
self,
tagname_filter: [str],
tagname_filter: [str] = None,
limit: int = None,
offset: int = None,
) -> DataFrame:
Expand All @@ -372,7 +372,7 @@ def latest(
"""
latest_parameters = {
"source": self.data_source,
"tag_names": tagname_filter,
"tag_names": [] if tagname_filter is None else tagname_filter,
"tagname_column": self.tagname_column,
"limit": limit,
"offset": offset,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -49,12 +49,48 @@ def __name__(self):


class DummyJob:
    """Stand-in for the Databricks Jobs API used by the deploy tests.

    Every operation is a no-op; `list` always reports this instance as
    the single matching job so callers can exercise the "job exists"
    code path.
    """

    # Shared holder mirroring the jobs collection on the real client.
    jobs = []
    # Fixed identifier handed back for every fake job.
    job_id = 1

    def __init__(self):
        pass

    def create(self, **kwargs):
        """Pretend to create a job; nothing is recorded."""
        pass

    def reset(self, job_id=None, new_settings=None):
        """Pretend to overwrite an existing job definition."""
        pass

    def run_now(self, job_id=None):
        """Pretend to trigger an immediate run of a job."""
        pass

    def list(self, name=None, job_id=None):
        """Report this instance as the only matching job."""
        return [self]


class DummyWorkspace:
    """Stand-in for the Databricks workspace (files) API used in tests.

    Both operations succeed silently without touching any real
    workspace.
    """

    def __init__(self):
        pass

    def mkdirs(self, path=None):
        """Pretend to create a directory tree in the workspace."""
        pass

    def upload(self, path=None, overwrite=True, content=None):
        """Pretend to upload file content into the workspace."""
        pass


class DummyWorkspaceClient:
    """Stand-in for `databricks.sdk.WorkspaceClient` used by the deploy tests.

    Exposes the same two service attributes the deploy code touches,
    backed by the no-op dummy implementations.
    """

    # Fake workspace (files) and jobs services, shared across instances.
    workspace = DummyWorkspace()
    jobs = DummyJob()

    def __init__(self):
        pass


# Placeholder version for the rtdip-sdk PyPi library referenced in the tests.
default_version = "0.0.0rc0"
# Dotted path of the Databricks SDK jobs `list` API, patched by the tests below.
default_list_package = "databricks.sdk.service.jobs.JobsAPI.list"

Expand Down Expand Up @@ -98,11 +134,15 @@ def test_pipeline_job_deploy(mocker: MockerFixture):
return_value=None,
)
mocker.patch(
"databricks.sdk.mixins.workspace.WorkspaceExt.mkdirs", return_value=None
)
mocker.patch(
"databricks.sdk.mixins.workspace.WorkspaceExt.upload", return_value=None
)
"src.sdk.python.rtdip_sdk.pipelines.deploy.databricks.WorkspaceClient",
return_value=DummyWorkspaceClient(),
)
# mocker.patch(
# "databricks.sdk.mixins.workspace.WorkspaceExt.mkdirs", return_value=None
# )
# mocker.patch(
# "databricks.sdk.mixins.workspace.WorkspaceExt.upload", return_value=None
# )
libraries = Libraries(
pypi_libraries=[PyPiLibrary(name="rtdip-sdk", version=default_version)],
maven_libraries=[
Expand Down

0 comments on commit 63cfe38

Please sign in to comment.