
Commit 4dd26d9

Merge branch 'main' into jprakash-db/complex-param

Merged Main

2 parents: ddbe54e + 7b51c6e

File tree: 9 files changed (+34354 -95463 lines)


CHANGELOG.md

Lines changed: 4 additions & 0 deletions
@@ -1,5 +1,9 @@
 # Release History
 
+# 4.0.3 (2025-04-22)
+
+- Fix: Removed `packaging` dependency in favour of default libraries, for `urllib3` version checks (databricks/databricks-sql-python#547 by @jprakash-db)
+
 # 4.0.2 (2025-04-01)
 
 - Fix: Relaxed the pin for `python-dateutil` to be `^2.8.0` (databricks/databricks-sql-python#538 by @jprakash-db)

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "databricks-sql-connector"
-version = "4.0.2"
+version = "4.0.3"
 description = "Databricks SQL Connector for Python"
 authors = ["Databricks <[email protected]>"]
 license = "Apache-2.0"

src/databricks/sql/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -68,7 +68,7 @@ def __repr__(self):
 DATE = DBAPITypeObject("date")
 ROWID = DBAPITypeObject()
 
-__version__ = "4.0.2"
+__version__ = "4.0.3"
 USER_AGENT_NAME = "PyDatabricksSqlConnector"
 
 # These two functions are pyhive legacy

src/databricks/sql/auth/retry.py

Lines changed: 4 additions & 2 deletions
@@ -2,6 +2,7 @@
 import random
 import time
 import typing
+from importlib.metadata import version
 from enum import Enum
 from typing import List, Optional, Tuple, Union
 
@@ -16,7 +17,6 @@
 from urllib3 import HTTPResponse as BaseHTTPResponse
 from urllib3 import Retry
 from urllib3.util.retry import RequestHistory
-from packaging import version
 
 
 from databricks.sql.exc import (
@@ -312,7 +312,9 @@ def get_backoff_time(self) -> float:
 
         current_attempt = self.stop_after_attempts_count - int(self.total or 0)
         proposed_backoff = (2**current_attempt) * self.delay_min
-        if version.parse(urllib3.__version__) >= version.parse("2.0.0"):
+
+        library_version = version("urllib3")
+        if int(library_version.split(".")[0]) >= 2:
             if self.backoff_jitter != 0.0:
                 proposed_backoff += random.random() * self.backoff_jitter
 

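Note on the retry.py change above: the urllib3 version check now uses only the standard library. importlib.metadata.version("urllib3") returns the installed distribution's version string, and comparing its integer major component against 2 replaces the packaging.version.parse comparison that previously pulled in the third-party `packaging` dependency. A minimal standalone sketch of the new check (variable names other than those in the diff are illustrative):

from importlib.metadata import version

# Stdlib-only check mirroring the new retry.py logic: read the installed
# urllib3 version string (e.g. "2.2.1" or "1.26.20") and compare the major part.
library_version = version("urllib3")
urllib3_is_v2_or_newer = int(library_version.split(".")[0]) >= 2

# The removed approach needed the third-party `packaging` library:
#   from packaging import version as pkg_version
#   import urllib3
#   urllib3_is_v2_or_newer = pkg_version.parse(urllib3.__version__) >= pkg_version.parse("2.0.0")

if urllib3_is_v2_or_newer:
    # Only urllib3 2.x exposes Retry.backoff_jitter, so jitter is applied to the
    # proposed backoff on this branch alone (as in get_backoff_time above).
    print(f"urllib3 {library_version} supports backoff_jitter")

The new check only inspects the major version, which is all this branch needs, since it merely gates the 2.x-only backoff_jitter handling.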
src/databricks/sql/client.py

Lines changed: 14 additions & 1 deletion
@@ -1417,9 +1417,22 @@ def fetchall_arrow(self) -> "pyarrow.Table":
         while not self.has_been_closed_server_side and self.has_more_rows:
             self._fill_results_buffer()
             partial_results = self.results.remaining_rows()
-            results = pyarrow.concat_tables([results, partial_results])
+            if isinstance(results, ColumnTable) and isinstance(
+                partial_results, ColumnTable
+            ):
+                results = self.merge_columnar(results, partial_results)
+            else:
+                results = pyarrow.concat_tables([results, partial_results])
             self._next_row_index += partial_results.num_rows
 
+        # If PyArrow is installed and we have a ColumnTable result, convert it to PyArrow Table
+        # Valid only for metadata commands result set
+        if isinstance(results, ColumnTable) and pyarrow:
+            data = {
+                name: col
+                for name, col in zip(results.column_names, results.column_table)
+            }
+            return pyarrow.Table.from_pydict(data)
         return results
 
     def fetchall_columnar(self):

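Note on the client.py change above: when the accumulated result is a ColumnTable (column-oriented, used for metadata command result sets) and pyarrow is available, fetchall_arrow now zips the column names with their value lists and builds the returned table with pyarrow.Table.from_pydict. A minimal standalone sketch of that final conversion step, using a hypothetical SimpleColumnTable stand-in rather than the connector's own ColumnTable class:

from typing import List, NamedTuple

import pyarrow


class SimpleColumnTable(NamedTuple):
    # Hypothetical stand-in for the connector's ColumnTable: parallel lists of
    # column names and per-column value lists.
    column_names: List[str]
    column_table: List[list]


results = SimpleColumnTable(
    column_names=["catalog", "schema", "table"],
    column_table=[["main", "main"], ["default", "tpch"], ["orders", "lineitem"]],
)

# Same shape as the tail of the new fetchall_arrow: pair each column name with
# its values and hand the dict to Arrow.
data = {name: col for name, col in zip(results.column_names, results.column_table)}
arrow_table = pyarrow.Table.from_pydict(data)
print(arrow_table.num_rows, arrow_table.column_names)  # 2 ['catalog', 'schema', 'table']

The earlier part of the loop keeps its previous behaviour for Arrow data: Arrow partial results are still combined with pyarrow.concat_tables, while ColumnTable partial results are merged column-wise via merge_columnar before this final conversion.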
0 commit comments
