Skip to content

Commit 482d04a

Browse files
committed
style(ruff): ruff fixes
1 parent db964da commit 482d04a

File tree

21 files changed, +50 −53 lines changed

21 files changed, +50 −53 lines changed

docs/_renderer.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -61,9 +61,9 @@ def render(self, el: qd.ast.ExampleCode) -> str:
6161
if expect_failure in first or any(
6262
expect_failure in line for line in rest
6363
):
64-
assert (
65-
start and end
66-
), "expected failure should never occur alongside a skipped doctest example"
64+
assert start and end, (
65+
"expected failure should never occur alongside a skipped doctest example"
66+
)
6767
result.append("#| error: true")
6868

6969
# remove the quartodoc markers from the rendered code

ibis/backends/clickhouse/tests/test_client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -369,7 +369,7 @@ def test_create_table_no_syntax_error(con):
369369

370370

371371
def test_password_with_bracket():
372-
password = f'{os.environ.get("IBIS_TEST_CLICKHOUSE_PASSWORD", "")}[]'
372+
password = f"{os.environ.get('IBIS_TEST_CLICKHOUSE_PASSWORD', '')}[]"
373373
quoted_pass = quote_plus(password)
374374
host = os.environ.get("IBIS_TEST_CLICKHOUSE_HOST", "localhost")
375375
user = os.environ.get("IBIS_TEST_CLICKHOUSE_USER", "default")

ibis/backends/duckdb/tests/conftest.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -66,23 +66,23 @@ def ddl_script(self) -> Iterator[str]:
6666
yield (
6767
f"""
6868
CREATE OR REPLACE TABLE {table} AS
69-
SELECT * FROM read_parquet('{parquet_dir / f'{table}.parquet'}')
69+
SELECT * FROM read_parquet('{parquet_dir / f"{table}.parquet"}')
7070
"""
7171
)
7272
if not SANDBOXED:
7373
for table in TEST_TABLES_GEO:
7474
yield (
7575
f"""
7676
CREATE OR REPLACE TABLE {table} AS
77-
SELECT * FROM st_read('{geojson_dir / f'{table}.geojson'}')
77+
SELECT * FROM st_read('{geojson_dir / f"{table}.geojson"}')
7878
"""
7979
)
8080
for table in TEST_TABLE_GEO_PARQUET:
8181
# the ops on this table will need the spatial extension
8282
yield (
8383
f"""
8484
CREATE OR REPLACE TABLE {table} AS
85-
SELECT * FROM read_parquet('{parquet_dir / f'{table}.parquet'}')
85+
SELECT * FROM read_parquet('{parquet_dir / f"{table}.parquet"}')
8686
"""
8787
)
8888
yield (

ibis/backends/impala/__init__.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -341,8 +341,7 @@ def drop_database(self, name, force=False):
341341
)
342342
elif tables or udfs or udas:
343343
raise com.IntegrityError(
344-
f"Database {name} must be empty before "
345-
"being dropped, or set force=True"
344+
f"Database {name} must be empty before being dropped, or set force=True"
346345
)
347346
statement = ddl.DropDatabase(name, must_exist=not force)
348347
self._safe_exec_sql(statement)

ibis/backends/impala/tests/test_ddl.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,11 +15,11 @@
1515

1616
pytest.importorskip("impala")
1717

18-
from impala.error import HiveServer2Error # noqa: E402
18+
from impala.error import HiveServer2Error
1919

2020

2121
@pytest.fixture
22-
def temp_view(con) -> str:
22+
def temp_view(con):
2323
name = util.gen_name("view")
2424
yield name
2525
con.drop_view(name, force=True)

ibis/backends/impala/tests/test_exprs.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -148,9 +148,9 @@ def _check_impala_output_types_match(con, table):
148148
for n, left_ty, right_ty in zip(
149149
left_schema.names, left_schema.types, right_schema.types
150150
):
151-
assert (
152-
left_ty == right_ty
153-
), f"Value for {n} had left type {left_ty} and right type {right_ty}\nquery:\n{query}"
151+
assert left_ty == right_ty, (
152+
f"Value for {n} had left type {left_ty} and right type {right_ty}\nquery:\n{query}"
153+
)
154154

155155

156156
@pytest.mark.parametrize(

ibis/backends/impala/tests/test_parquet_ddl.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99

1010
pytest.importorskip("impala")
1111

12-
from impala.error import HiveServer2Error # noqa: E402
12+
from impala.error import HiveServer2Error
1313

1414

1515
def test_parquet_file_with_name(con, test_data_dir, temp_table):

ibis/backends/impala/tests/test_partition.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010

1111
pytest.importorskip("impala")
1212

13-
from impala.error import Error as ImpylaError # noqa: E402
13+
from impala.error import Error as ImpylaError
1414

1515

1616
@pytest.fixture

ibis/backends/sql/compilers/base.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -205,9 +205,9 @@ def array(self, *args: Any) -> sge.Array:
205205
first, *rest = args
206206

207207
if isinstance(first, sge.Select):
208-
assert (
209-
not rest
210-
), "only one argument allowed when `first` is a select statement"
208+
assert not rest, (
209+
"only one argument allowed when `first` is a select statement"
210+
)
211211

212212
return sge.Array(expressions=list(map(sge.convert, (first, *rest))))
213213

ibis/backends/sql/compilers/bigquery/udf/core.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -283,9 +283,9 @@ def visit_NameConstant(self, node):
283283
return "true"
284284
elif value is False:
285285
return "false"
286-
assert (
287-
value is None
288-
), f"value is not True and is not False, must be None, got {value}"
286+
assert value is None, (
287+
f"value is not True and is not False, must be None, got {value}"
288+
)
289289
return "null"
290290

291291
def visit_Str(self, node):

ibis/backends/sql/compilers/pyspark.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -251,8 +251,7 @@ def visit_LastValue(self, op, *, arg):
251251
def visit_First(self, op, *, arg, where, order_by, include_null):
252252
if where is not None and include_null:
253253
raise com.UnsupportedOperationError(
254-
"Combining `include_null=True` and `where` is not supported "
255-
"by pyspark"
254+
"Combining `include_null=True` and `where` is not supported by pyspark"
256255
)
257256
out = self.agg.first(arg, where=where, order_by=order_by)
258257
if not include_null:
@@ -262,8 +261,7 @@ def visit_First(self, op, *, arg, where, order_by, include_null):
262261
def visit_Last(self, op, *, arg, where, order_by, include_null):
263262
if where is not None and include_null:
264263
raise com.UnsupportedOperationError(
265-
"Combining `include_null=True` and `where` is not supported "
266-
"by pyspark"
264+
"Combining `include_null=True` and `where` is not supported by pyspark"
267265
)
268266
out = self.agg.last(arg, where=where, order_by=order_by)
269267
if not include_null:

ibis/backends/sql/compilers/snowflake.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -847,9 +847,9 @@ def visit_ArrayFilter(self, op, *, arg, param, body, index):
847847
)
848848

849849
def visit_JoinLink(self, op, *, how, table, predicates):
850-
assert (
851-
predicates or how == "cross"
852-
), "expected non-empty predicates when not a cross join"
850+
assert predicates or how == "cross", (
851+
"expected non-empty predicates when not a cross join"
852+
)
853853

854854
if how == "asof":
855855
# the asof join match condition is always the first predicate by

ibis/backends/sql/datatypes.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -629,9 +629,9 @@ def _from_sqlglot_TIMESTAMP_NS(cls, nullable: bool | None = None) -> dt.Timestam
629629

630630
@classmethod
631631
def _from_ibis_GeoSpatial(cls, dtype: dt.GeoSpatial):
632-
assert (
633-
dtype.geotype == "geometry"
634-
), "DuckDB only supports geometry types; geography types are not supported"
632+
assert dtype.geotype == "geometry", (
633+
"DuckDB only supports geometry types; geography types are not supported"
634+
)
635635
return sge.DataType(this=typecode.GEOMETRY)
636636

637637
_from_ibis_Point = _from_ibis_LineString = _from_ibis_Polygon = (

ibis/backends/tests/test_vectorized_udf.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -277,9 +277,9 @@ def test_elementwise_udf(udf_backend, udf_alltypes, udf_df, udf):
277277
result = expr.execute()
278278

279279
expected_func = getattr(expr.op(), "__func__", getattr(udf, "func", None))
280-
assert (
281-
expected_func is not None
282-
), f"neither __func__ nor func attributes found on {udf} or expr object"
280+
assert expected_func is not None, (
281+
f"neither __func__ nor func attributes found on {udf} or expr object"
282+
)
283283

284284
expected = expected_func(udf_df["double_col"])
285285
udf_backend.assert_series_equal(result, expected, check_names=False)
@@ -292,9 +292,9 @@ def test_elementwise_udf_mutate(udf_backend, udf_alltypes, udf_df, udf):
292292
result = expr.execute()
293293

294294
expected_func = getattr(udf_expr.op(), "__func__", getattr(udf, "func", None))
295-
assert (
296-
expected_func is not None
297-
), f"neither __func__ nor func attributes found on {udf} or expr object"
295+
assert expected_func is not None, (
296+
f"neither __func__ nor func attributes found on {udf} or expr object"
297+
)
298298

299299
expected = udf_df.assign(incremented=expected_func(udf_df["double_col"]))
300300
udf_backend.assert_series_equal(result["incremented"], expected["incremented"])

ibis/backends/tests/tpc/conftest.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,9 @@ def pytest_pyfunc_call(pyfuncitem):
3131
testargs["backend"] = backend
3232

3333
result = testfunction(**testargs)
34-
assert (
35-
result is None
36-
), "test function should not return anything, did you mean to use assert?"
34+
assert result is None, (
35+
"test function should not return anything, did you mean to use assert?"
36+
)
3737
return True
3838

3939

ibis/expr/datatypes/core.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -785,8 +785,7 @@ def __init__(
785785
if precision is not None:
786786
if not isinstance(precision, numbers.Integral):
787787
raise TypeError(
788-
"Decimal type precision must be an integer; "
789-
f"got {type(precision)}"
788+
f"Decimal type precision must be an integer; got {type(precision)}"
790789
)
791790
if precision < 0:
792791
raise ValueError("Decimal type precision cannot be negative")

ibis/expr/datatypes/tests/test_value.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ class Foo(enum.Enum):
4545
(-32769, dt.int32),
4646
(-2147483649, dt.int64),
4747
(1.5, dt.double),
48-
(decimal.Decimal(1.5), dt.decimal),
48+
(decimal.Decimal("1.5"), dt.decimal),
4949
# parametric types
5050
(list("abc"), dt.Array(dt.string)),
5151
(set("abc"), dt.Array(dt.string)),

ibis/expr/types/generic.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1526,9 +1526,9 @@ def __pandas_result__(
15261526
if data_mapper is None:
15271527
from ibis.formats.pandas import PandasData as data_mapper
15281528

1529-
assert (
1530-
len(df.columns) == 1
1531-
), "more than one column when converting columnar result DataFrame to Series"
1529+
assert len(df.columns) == 1, (
1530+
"more than one column when converting columnar result DataFrame to Series"
1531+
)
15321532
# in theory we could use df.iloc[:, 0], but there seems to be a bug in
15331533
# older geopandas where df.iloc[:, 0] doesn't return the same kind of
15341534
# object as df.loc[:, column_name] when df is a GeoDataFrame

ibis/expr/types/relations.py

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2958,7 +2958,7 @@ def describe(
29582958
col_max = lit(None).cast(float)
29592959
col_mode = lit(None).cast(str)
29602960
quantile_values = {
2961-
f"p{100*q:.6f}".rstrip("0").rstrip("."): lit(None).cast(float)
2961+
f"p{100 * q:.6f}".rstrip("0").rstrip("."): lit(None).cast(float)
29622962
for q in quantile
29632963
}
29642964

@@ -2969,7 +2969,9 @@ def describe(
29692969
col_min = col.min().cast(float)
29702970
col_max = col.max().cast(float)
29712971
quantile_values = {
2972-
f"p{100*q:.6f}".rstrip("0").rstrip("."): col.quantile(q).cast(float)
2972+
f"p{100 * q:.6f}".rstrip("0").rstrip("."): col.quantile(q).cast(
2973+
float
2974+
)
29732975
for q in quantile
29742976
}
29752977
elif typ.is_string():

ibis/expr/types/strings.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -98,12 +98,11 @@ def __getitem__(self, key: slice | int | ir.IntegerScalar) -> StringValue:
9898
raise ValueError("Step can only be 1")
9999
if start is not None and not isinstance(start, ir.Expr) and start < 0:
100100
raise ValueError(
101-
"Negative slicing not yet supported, got start value "
102-
f"of {start:d}"
101+
f"Negative slicing not yet supported, got start value of {start:d}"
103102
)
104103
if stop is not None and not isinstance(stop, ir.Expr) and stop < 0:
105104
raise ValueError(
106-
"Negative slicing not yet supported, got stop value " f"of {stop:d}"
105+
f"Negative slicing not yet supported, got stop value of {stop:d}"
107106
)
108107
if start is None and stop is None:
109108
return self

ibis/tests/expr/test_pretty_repr.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010

1111
pytest.importorskip("rich")
1212

13-
from ibis.expr.types.pretty import format_column, format_values # noqa: E402
13+
from ibis.expr.types.pretty import format_column, format_values
1414

1515
pd = pytest.importorskip("pandas")
1616

0 commit comments

Comments (0)