Skip to content

Commit d1c223e

Browse files
authored
Merge pull request #528 from rtdip/develop
v0.8.2
2 parents 63cfe38 + 898e580 commit d1c223e

File tree

4 files changed

+8
-5
lines changed

4 files changed

+8
-5
lines changed

.github/release.yml

+4-1
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,10 @@ changelog:
       - documentation
   - title: Pipelines
     labels:
-      - pipelines
+      - pipelines
+  - title: Queries
+    labels:
+      - queries
   - title: Other Changes
     labels:
       - "*"

environment.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ dependencies:
     - fastapi==0.100.1
     - httpx==0.24.1
     - trio==0.22.1
-    - pyspark>=3.3.0,<3.6.0
+    - pyspark>=3.3.0,<3.5.0
     - delta-spark>=2.2.0,<3.1.0
     - grpcio>=1.48.1
     - grpcio-status>=1.48.1

setup.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@
 ]
 
 PYSPARK_PACKAGES = [
-    "pyspark>=3.3.0,<3.6.0",
+    "pyspark>=3.3.0,<3.5.0",
     "delta-spark>=2.2.0,<2.5.0",
 ]
 

src/sdk/python/rtdip_sdk/pipelines/deploy/databricks.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -125,7 +125,7 @@ def deploy(self) -> Union[bool, ValueError]:
         """
         # Add libraries to Databricks Job
         workspace_client = WorkspaceClient(
-            host=self.host, token=self.token, auth_type="token"
+            host=self.host, token=self.token, auth_type="pat"
         )
         for task in self.databricks_job.tasks:
             if task.notebook_task is None and task.spark_python_task is None:
@@ -263,7 +263,7 @@ def launch(self):
         Launches an RTDIP Pipeline Job in Databricks Workflows. This will perform the equivalent of a `Run Now` in Databricks Workflows
         """
         workspace_client = WorkspaceClient(
-            host=self.host, token=self.token, auth_type="token"
+            host=self.host, token=self.token, auth_type="pat"
         )
         job_found = False
         for existing_job in workspace_client.jobs.list(name=self.databricks_job.name):

0 commit comments

Comments
 (0)