From 3b47a7a0caad3c7126cdd3c8c4253a7cfef42d85 Mon Sep 17 00:00:00 2001
From: Gil Forsyth
Date: Mon, 3 Feb 2025 11:05:43 -0500
Subject: [PATCH] fix(pyspark): generate `IF NOT EXISTS` if `force=True`

We were using the wrong keyword argument in `sqlglot` for this, so it was
being silently swallowed.
---
 ibis/backends/pyspark/__init__.py       |  4 ++--
 ibis/backends/pyspark/tests/test_ddl.py | 23 +++++++++++++++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)

diff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py
index 519b85d7d026..9f9ad730f221 100644
--- a/ibis/backends/pyspark/__init__.py
+++ b/ibis/backends/pyspark/__init__.py
@@ -507,7 +507,7 @@ def create_database(
 
         sql = sge.Create(
             kind="DATABASE",
-            exist=force,
+            exists=force,
             this=sg.to_identifier(name, quoted=self.compiler.quoted),
             properties=properties,
         )
@@ -533,7 +533,7 @@ def drop_database(
         """
         sql = sge.Drop(
             kind="DATABASE",
-            exist=force,
+            exists=force,
             this=sg.to_identifier(name, quoted=self.compiler.quoted),
             cascade=force,
         )
diff --git a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py
index cb00750a0003..7177521ef5c8 100644
--- a/ibis/backends/pyspark/tests/test_ddl.py
+++ b/ibis/backends/pyspark/tests/test_ddl.py
@@ -9,6 +9,7 @@
 import ibis
 from ibis import util
+from ibis.backends.tests.errors import PySparkAnalysisException
 from ibis.tests.util import assert_equal
 
 pyspark = pytest.importorskip("pyspark")
 
@@ -177,3 +178,25 @@ def test_create_table_reserved_identifier(con, alltypes, keyword_t):
     t = con.create_table(keyword_t, expr)
     result = t.count().execute()
     assert result == expected
+
+
+def test_create_database_exists(con):
+    con.create_database(dbname := util.gen_name("dbname"))
+
+    with pytest.raises(PySparkAnalysisException):
+        con.create_database(dbname)
+
+    con.create_database(dbname, force=True)
+
+    con.drop_database(dbname, force=True)
+
+
+def test_drop_database_exists(con):
+    con.create_database(dbname := util.gen_name("dbname"))
+
+    con.drop_database(dbname)
+
+    with pytest.raises(PySparkAnalysisException):
+        con.drop_database(dbname)
+
+    con.drop_database(dbname, force=True)
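
Note (not part of the patch): a minimal sketch of why the typo went unnoticed, assuming a recent sqlglot release and a made-up database name `demo`. sqlglot expression constructors accept arbitrary keyword arguments without validation, so the misspelled `exist=` was stored but never read by the SQL generator; `exists=` is the argument that actually drives `IF NOT EXISTS` / `IF EXISTS`.

import sqlglot as sg
import sqlglot.expressions as sge

db = sg.to_identifier("demo", quoted=True)

# Misspelled keyword: accepted without error, but the generator ignores it,
# so no IF NOT EXISTS appears in the emitted SQL.
print(sge.Create(kind="DATABASE", exist=True, this=db).sql(dialect="spark"))
# -> CREATE DATABASE `demo`

# Correct keyword: the generator reads `exists` and emits the guard.
print(sge.Create(kind="DATABASE", exists=True, this=db).sql(dialect="spark"))
# -> CREATE DATABASE IF NOT EXISTS `demo`

# Same keyword on the DROP side, plus CASCADE as used in drop_database.
print(sge.Drop(kind="DATABASE", exists=True, this=db, cascade=True).sql(dialect="spark"))
# -> DROP DATABASE IF EXISTS `demo` CASCADE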