
Commit 4c224ef: some fixes (parent: 236de13)

10 files changed: +82, -88 lines

.github/workflows/main.yml (+1, -1)

@@ -90,7 +90,7 @@ jobs:
          pip --version
      - name: Build distributions
        run: |
-          pip install git+https://daimor:${{ secrets.GITTOKEN }}@github.com/caretdev/intersystems-irispython.git
+          pip install https://github.com/intersystems-community/intersystems-irispython/releases/download/3.4.1/intersystems_iris-3.4.1-py3-none-any.whl
          ./scripts/build-dist.sh

      - name: Show distributions

Dockerfile (+4, -17)

@@ -1,19 +1,6 @@
-FROM intersystemsdc/iris-community
+FROM containers.intersystems.com/intersystems/iris-community:2022.3.0.606.0

-USER root
+ENV PIP_TARGET=${ISC_PACKAGE_INSTALLDIR}/mgr/python

-RUN apt-get update && apt-get -y install git
-
-USER ${ISC_PACKAGE_MGRUSER}
-
-COPY --chown=${ISC_PACKAGE_MGRUSER}:${ISC_PACKAGE_MGRGROUP} . /home/irisowner/dbt-iris
-
-WORKDIR /home/irisowner/dbt-iris
-
-ENV PATH="$PATH:/home/irisowner/.local/bin/"
-ENV PYTHONPATH="/home/irisowner/dbt-iris"
-
-RUN python3 -m pip install --upgrade pip \
-    && pip install -r requirements-dev.txt -r requirements.txt
-
-ENTRYPOINT [ "bash" ]
+RUN python3 -m pip install --upgrade pip && \
+    pip install sqlalchemy~=1.4.46 pandas sqlalchemy-iris
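
Note: the new base image ships InterSystems' embedded Python, and PIP_TARGET=${ISC_PACKAGE_INSTALLDIR}/mgr/python points pip at the directory embedded Python imports from, so sqlalchemy, pandas and sqlalchemy-iris become importable inside IRIS without the old git/COPY/requirements steps. A hypothetical sanity check (the file name check_deps.py, the irispython invocation, and the module name sqlalchemy_iris are assumptions, not part of this commit):

    # check_deps.py -- hypothetical; run inside the container with the instance's
    # embedded interpreter, e.g. `irispython check_deps.py`.
    import importlib
    import os

    for name in ("sqlalchemy", "pandas", "sqlalchemy_iris"):
        mod = importlib.import_module(name)  # raises ImportError if the pip install missed it
        print(name, getattr(mod, "__version__", "?"), os.path.dirname(mod.__file__))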

dbt/adapters/iris/impl.py (+2, -2)

@@ -53,7 +53,7 @@ def convert_number_type(cls, agate_table, col_idx):

     @classmethod
     def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str:
-        return "timestamp"
+        return "datetime"

     def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
         return f"dateadd('{interval}', {number}, {add_to})"

@@ -172,7 +172,7 @@ def submit_python_job(self, parsed_model: dict, compiled_code: str):
        """

        response, _ = self.execute(create_procedure, auto_begin=False, fetch=False)
-        response, _ = self.execute(f"SELECT {proc_name}()", auto_begin=False, fetch=False)
+        response, _ = self.execute(f"SELECT {proc_name}()", auto_begin=False, fetch=True)
        self.execute(f"drop procedure if exists {proc_name}", auto_begin=False, fetch=False)
        return response
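
Context for the fetch=True change: dbt adapters' execute() returns an (AdapterResponse, agate.Table) pair, and with fetch=False the result of SELECT {proc_name}() was never read back, so whatever the generated procedure returned was discarded. The sketch below shows the call pattern only, not the adapter's actual code; execute, proc_name and create_procedure stand in for the method's own variables:

    # Illustrative sketch of the submit_python_job flow: create a temporary stored
    # procedure, invoke it via SELECT, then drop it. `execute` mimics dbt's
    # adapter.execute(sql, auto_begin, fetch) -> (AdapterResponse, agate.Table).
    def run_python_job(execute, proc_name: str, create_procedure: str):
        execute(create_procedure, auto_begin=False, fetch=False)
        # fetch=True so the one-row result of the call is materialized and the
        # procedure's return value (e.g. "OK" or an error string) reaches the caller.
        response, table = execute(f"SELECT {proc_name}()", auto_begin=False, fetch=True)
        execute(f"drop procedure if exists {proc_name}", auto_begin=False, fetch=False)
        return response, table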

dbt/include/iris/macros/adapters.sql (+5, -2)

@@ -30,7 +30,7 @@ dbt docs: https://docs.getdbt.com/docs/contributing/building-a-new-adapter
  {% call statement('_', auto_begin=False) -%}
  CREATE OR REPLACE FUNCTION HASH(alg VARCHAR(''), str VARCHAR(''))
  PROCEDURE
- RETURNS VARCHAR('')
+ RETURNS VARCHAR(1024)
  LANGUAGE PYTHON
  {
  import hashlib

@@ -87,6 +87,7 @@ dbt docs: https://docs.getdbt.com/docs/contributing/building-a-new-adapter
      table_schema as "schema",
      case when table_type = 'BASE TABLE' then 'table'
           when table_type = 'VIEW' then 'view'
+          when table_type = 'GLOBAL TEMPORARY' then 'table'
           else table_type
      end as table_type
  from information_schema.tables

@@ -134,7 +135,9 @@ dbt docs: https://docs.getdbt.com/docs/contributing/building-a-new-adapter
  {%- endif %}
  /* create_table_as */
  {{ sql_header if sql_header is not none }}
- create {% if temporary: -%}global temporary{%- endif %} table
+ create
+ /* {% if temporary: -%}global temporary{%- endif %} */
+ table
  {{ relation }}
  as
  {{ compiled_code }}
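
The HASH helper is declared LANGUAGE PYTHON, and its body is only partially visible here (just the import hashlib line), so widening the return type to VARCHAR(1024) simply leaves room for long hex digests. Purely as an illustration of what such a helper can look like, assuming it dispatches on the algorithm name through hashlib:

    # Hypothetical Python equivalent of the SQL HASH(alg, str) function; the real
    # body lives in adapters.sql and is not fully shown in this diff.
    import hashlib

    def hash_value(alg: str, value: str) -> str:
        # alg is e.g. "md5" or "sha256"; a hex digest easily fits VARCHAR(1024)
        return hashlib.new(alg, value.encode("utf-8")).hexdigest()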

dbt/include/iris/macros/materializations/snapshots/helpers.sql (+25, -24)

@@ -16,7 +16,7 @@
        {{ strategy.scd_id }} as dbt_scd_id,
        {{ strategy.updated_at }} as dbt_updated_at,
        {{ strategy.updated_at }} as dbt_valid_from,
-        null as dbt_valid_to
+        cast(null as timestamp) as dbt_valid_to
    from (
        {{ sql }}
    ) sbq

@@ -26,68 +26,69 @@
 {% macro iris__snapshot_staging_table(strategy, source_sql, target_relation) -%}
    select * from (
    select
-        'insert' as dbt_change_type,
-        source_data.*
+        'update' as dbt_change_type,
+        source_data.*,
+        snapshotted_data.dbt_scd_id

    from (
-
        select
            *,
            {{ strategy.unique_key }} as dbt_unique_key,
            {{ strategy.updated_at }} as dbt_updated_at,
            {{ strategy.updated_at }} as dbt_valid_from,
-            null as dbt_valid_to,
-            {{ strategy.scd_id }} as dbt_scd_id
+            {{ strategy.updated_at }} as dbt_valid_to

        from ( {{ source_sql }} )

    ) as source_data
-    left outer join (
+    join (
        select *,
            {{ strategy.unique_key }} as dbt_unique_key

        from {{ target_relation }}
-        where dbt_valid_to is null

    ) as snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key
-    where snapshotted_data.dbt_unique_key is null
-    or (
-        snapshotted_data.dbt_unique_key is not null
-        and (
-            {{ strategy.row_changed }}
-        )
+    where snapshotted_data.dbt_valid_to is null
+    and (
+        {{ strategy.row_changed }}
    )
-    ) as insertions
+    ) as updates
    union all
    select * from (
    select
-        'update' as dbt_change_type,
-        source_data.*,
-        snapshotted_data.dbt_scd_id
+        'insert' as dbt_change_type,
+        source_data.*

    from (
+
        select
            *,
            {{ strategy.unique_key }} as dbt_unique_key,
            {{ strategy.updated_at }} as dbt_updated_at,
            {{ strategy.updated_at }} as dbt_valid_from,
-            {{ strategy.updated_at }} as dbt_valid_to
+            cast(null as timestamp) as dbt_valid_to,
+            {{ strategy.scd_id }} as dbt_scd_id

        from ( {{ source_sql }} )

    ) as source_data
-    join (
+    left outer join (
        select *,
            {{ strategy.unique_key }} as dbt_unique_key

        from {{ target_relation }}
+        where dbt_valid_to is null

    ) as snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key
-    where snapshotted_data.dbt_valid_to is null
-    and (
-        {{ strategy.row_changed }}
+    where snapshotted_data.dbt_unique_key is null
+    or (
+        snapshotted_data.dbt_unique_key is not null
+        and (
+            {{ strategy.row_changed }}
+        )
    )
-    ) as updates
+    ) as insertions
+
    {%- if strategy.invalidate_hard_deletes %}
    union all
    select * from (

dbt/include/iris/macros/materializations/snapshots/snapshot_merge.sql (+5, -8)

@@ -4,14 +4,11 @@

    {% set update_sql %}
        update {{ target }}
-        set dbt_valid_to = (
-            SELECT
-                DBT_INTERNAL_SOURCE.dbt_valid_to
-            from {{ source }} as DBT_INTERNAL_SOURCE
-            where DBT_INTERNAL_SOURCE.dbt_scd_id = {{ target }}.dbt_scd_id
-              and DBT_INTERNAL_SOURCE.dbt_change_type = 'update'
-        )
-        WHERE dbt_valid_to is null
+        set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to
+        from {{ source }} as DBT_INTERNAL_SOURCE
+        where DBT_INTERNAL_SOURCE.dbt_scd_id = {{ target }}.dbt_scd_id
+          and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')
+          and {{ target }}.dbt_valid_to is null;
    {% endset %}

    {% do adapter.add_query(update_sql, auto_begin=False) %}

dbt/include/iris/macros/materializations/table.sql (+14, -23)

@@ -64,21 +64,16 @@
 {% macro py_write_table(compiled_code, target_relation, temporary=False) %}
 {{ compiled_code }}

-import sys
-paths = [
-    '/home/irisowner/sqlalchemy',
-    '/home/irisowner/sqlalchemy-iris',
-    '/home/irisowner/intersystems-irispython',
-]
-for path in paths:
-    if path not in sys.path:
-        sys.path.insert(1, path)
 try:
     import pandas
+except:
+    raise Exception("Missing required dependency: pandas")
+
+try:
     from sqlalchemy import create_engine
     import intersystems_iris
 except:
-    return "ERROR"
+    raise Exception("Missing required dependencies: sqlalchemy, sqlalchemy-iris")

 class DataFrame(pandas.DataFrame):
     def limit(self, num):

@@ -93,21 +88,17 @@ class IRISSession:
     def __init__(self) -> None:
         self.engine = create_engine('iris+emb:///')

-    def table(self, full_name) -> DataFrame:
-        [schema, table] = full_name.split('.') if '.' in full_name else [self.default_schema, full_name]
-        df = pandas.read_sql_table(table, self.engine, schema=schema)
-        return DataFrame(df)
+    def table(self, full_name) -> DataFrame:
+        [schema, table] = full_name.split('.') if '.' in full_name else [self.default_schema, full_name]
+        df = pandas.read_sql_table(table, self.engine, schema=schema)
+        return DataFrame(df)

     def to_sql(self, df, table, schema):
         df.to_sql(table, self.engine, if_exists='replace', schema=schema)

-try:
-    session = IRISSession()
-    dbt = dbtObj(session.table)
-    df = model(dbt, session)
-    session.to_sql(df, '{{ target_relation.identifier }}', '{{ target_relation.schema }}')
-    return "OK"
-except Exception as ex:
-    print(ex)
-    return "ERROR"
+session = IRISSession()
+dbt = dbtObj(session.table)
+df = model(dbt, session)
+session.to_sql(df, '{{ target_relation.identifier }}', '{{ target_relation.schema }}')
+return "OK"
 {% endmacro %}
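
Background for this macro, for readers new to dbt Python models: py_write_table appends the scaffolding above to the user's compiled model code, which must define model(dbt, session) and return a DataFrame. Because the macro wires dbt = dbtObj(session.table), dbt.ref() and dbt.source() load tables through session.table(), and the returned frame is persisted with session.to_sql(). A hypothetical model of that shape (the relation name raw_orders is made up):

    # Hypothetical dbt Python model. dbt compiles a function like this into
    # `compiled_code`; py_write_table then appends the IRISSession scaffolding.
    def model(dbt, session):
        # dbt.ref() resolves through session.table(), so this returns the
        # pandas-based DataFrame subclass defined in the macro.
        orders = dbt.ref("raw_orders")      # illustrative relation name
        recent = orders.limit(100)          # .limit() comes from the DataFrame subclass
        return recent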

docker-compose.yml (+5, -3)

@@ -1,11 +1,13 @@
 version: '3'
 services:
   iris:
-    image: intersystemsdc/iris-community
+    build: .
     command:
       - -a
       # by default it starts with a requirement to change passwords for system users
       - iris session iris -U %SYS '##class(Security.Users).UnExpireUserPasswords("*")'
     ports:
-      - 1972:1972
-      - 52773:52773
+      - 1975:1972
+      - 52775:52773
+    volumes:
+      - ~/iris.key:/usr/irissys/mgr/iris.key

scripts/build-dist.sh (+19, -6)

@@ -1,20 +1,33 @@
 #!/bin/bash

+packages=("iris" "intersystems_iris" "irisnative")
+for package in ${packages[@]};
+do
+    rm -f ./$package
+    package_path=`python -c "import importlib.util; print(importlib.util.find_spec('${package}').submodule_search_locations[0])"`
+    ln -s $package_path ./$package
+done
+
 set -eo pipefail

-DBT_PATH="$( cd "$(dirname "$0")/.." ; pwd -P )"
+PROJECT="$( cd "$(dirname "$0")/.." ; pwd -P )"

-PYTHON_BIN=${PYTHON_BIN:-python}
+PYTHON_BIN=${PYTHON_BIN:-python3}

 echo "$PYTHON_BIN"

 set -x

-rm -rf "$DBT_PATH"/dist
-rm -rf "$DBT_PATH"/build
-mkdir -p "$DBT_PATH"/dist
+rm -rf "$PROJECT"/dist
+rm -rf "$PROJECT"/build
+mkdir -p "$PROJECT"/dist

-cd "$DBT_PATH"
+cd "$PROJECT"
 $PYTHON_BIN setup.py sdist bdist_wheel

 set +x
+
+for package in ${packages[@]};
+do
+    rm -f ./$package
+done
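
The new loop symlinks the installed iris, intersystems_iris and irisnative packages into the project root before setup.py runs, so the widened find_namespace_packages() list in setup.py (next file) can bundle them into the wheel; the links are removed again after the build. The inline python -c locates each package with importlib; an equivalent standalone sketch, for illustration only:

    # Sketch of the lookup build-dist.sh performs via `python -c` for each package.
    import importlib.util

    for package in ("iris", "intersystems_iris", "irisnative"):
        spec = importlib.util.find_spec(package)
        if spec is None or not spec.submodule_search_locations:
            raise SystemExit(f"{package} is not installed as an importable package")
        # The first search location is the on-disk directory that gets symlinked.
        print(package, list(spec.submodule_search_locations)[0])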

setup.py (+2, -2)

@@ -4,7 +4,7 @@

 package_name = "dbt-iris"
 # make sure this always matches dbt/adapters/{adapter}/__version__.py
-package_version = "1.3.1.1"
+package_version = "1.3.2"
 description = """The InterSystems IRIS adapter plugin for dbt"""

 thelibFolder = os.path.dirname(os.path.realpath(__file__))

@@ -25,7 +25,7 @@
     author="CaretDev",
     author_email="[email protected]",
     url="https://github.com/caretdev/dbt-iris",
-    packages=find_namespace_packages(include=["dbt", "dbt.*"]),
+    packages=find_namespace_packages(include=["dbt", "dbt.*", "iris", "intersystems_iris.*", "irisnative"]),
     include_package_data=True,
     install_requires=requirements,
     classifiers=[
