From c7d1d3878f22bb1213c29fb705333d46e493b3ea Mon Sep 17 00:00:00 2001
From: Gil Forsyth
Date: Tue, 4 Feb 2025 07:04:35 -0500
Subject: [PATCH] fix(pyspark): generate `IF NOT EXISTS` if `force=True`
 (#10782)

Co-authored-by: Phillip Cloud <417981+cpcloud@users.noreply.github.com>
---
 ibis/backends/pyspark/__init__.py       |  4 +--
 ibis/backends/pyspark/tests/test_ddl.py | 33 +++++++++++++++++++++++++
 2 files changed, 35 insertions(+), 2 deletions(-)

diff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py
index 519b85d7d026..9f9ad730f221 100644
--- a/ibis/backends/pyspark/__init__.py
+++ b/ibis/backends/pyspark/__init__.py
@@ -507,7 +507,7 @@ def create_database(
 
         sql = sge.Create(
             kind="DATABASE",
-            exist=force,
+            exists=force,
             this=sg.to_identifier(name, quoted=self.compiler.quoted),
             properties=properties,
         )
@@ -533,7 +533,7 @@ def drop_database(
         """
         sql = sge.Drop(
             kind="DATABASE",
-            exist=force,
+            exists=force,
             this=sg.to_identifier(name, quoted=self.compiler.quoted),
             cascade=force,
         )
diff --git a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py
index cb00750a0003..5683e79f6635 100644
--- a/ibis/backends/pyspark/tests/test_ddl.py
+++ b/ibis/backends/pyspark/tests/test_ddl.py
@@ -9,6 +9,7 @@
 
 import ibis
 from ibis import util
+from ibis.backends.tests.errors import PySparkAnalysisException
 from ibis.tests.util import assert_equal
 
 pyspark = pytest.importorskip("pyspark")
@@ -177,3 +178,35 @@ def test_create_table_reserved_identifier(con, alltypes, keyword_t):
     t = con.create_table(keyword_t, expr)
     result = t.count().execute()
     assert result == expected
+
+
+@pytest.mark.xfail_version(
+    pyspark=["pyspark<3.5"],
+    raises=ValueError,
+    reason="PySparkAnalysisException is not available in PySpark <3.5",
+)
+def test_create_database_exists(con):
+    con.create_database(dbname := util.gen_name("dbname"))
+
+    with pytest.raises(PySparkAnalysisException):
+        con.create_database(dbname)
+
+    con.create_database(dbname, force=True)
+
+    con.drop_database(dbname, force=True)
+
+
+@pytest.mark.xfail_version(
+    pyspark=["pyspark<3.5"],
+    raises=ValueError,
+    reason="PySparkAnalysisException is not available in PySpark <3.5",
+)
+def test_drop_database_exists(con):
+    con.create_database(dbname := util.gen_name("dbname"))
+
+    con.drop_database(dbname)
+
+    with pytest.raises(PySparkAnalysisException):
+        con.drop_database(dbname)
+
+    con.drop_database(dbname, force=True)