Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Monkeypatch BigQuery adapter to retrieve SQL for async execution #1474

Draft
wants to merge 22 commits into
base: main
Choose a base branch
from
Draft
Changes from 1 commit
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
e657823
Monkeypatch BigQuery adapter for retrieving SQL for async execution
pankajkoti Jan 21, 2025
8b7b45d
Update cosmos/operators/local.py
pankajkoti Jan 21, 2025
e3ea847
Update cosmos/operators/local.py
pankajkoti Jan 21, 2025
8563a8c
Address @tatiana's review feedback
pankajkoti Jan 23, 2025
94eada9
Refactor run_command method to reduce complexity
pankajkoti Jan 24, 2025
92314e8
Resolve type-check errors with respect to updated method signatures
pankajkoti Jan 24, 2025
859f3ad
Fix tests args
pankajkoti Jan 24, 2025
379d997
Test async dag
pankajkoti Jan 24, 2025
7a85d27
Merge branch 'main' into monkeypatch-bq-adapter
pankajkoti Jan 27, 2025
152b936
🎨 [pre-commit.ci] Auto format from pre-commit.com hooks
pre-commit-ci[bot] Jan 27, 2025
c11f614
Update cosmos/operators/airflow_async.py
pankajkoti Jan 27, 2025
685757d
Update cosmos/operators/airflow_async.py
pankajkoti Jan 27, 2025
d327b6d
Update cosmos/operators/airflow_async.py
pankajkoti Jan 27, 2025
31161bf
Moment of glory
pankajkoti Jan 28, 2025
5ea5217
Moment of glory 2
pankajkoti Jan 29, 2025
dd595f7
🎨 [pre-commit.ci] Auto format from pre-commit.com hooks
pre-commit-ci[bot] Jan 29, 2025
f6e17a5
push the progress
pankajkoti Jan 30, 2025
e4fc114
Merge branch 'main' into monkeypatch-bq-adapter
pankajkoti Jan 30, 2025
93e7a8c
Stop another call to BaseOperator init
pankajkoti Jan 30, 2025
9fc5112
Fix import
pankajkoti Jan 30, 2025
55acacc
Try changing inheritance order to see if MRO helps
pankajkoti Jan 30, 2025
57ed5a8
Remove compile task test
pankajkoti Jan 31, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Refactor run_command method to reduce complexity
pankajkoti committed Jan 24, 2025
commit 94eada99a769d3d9a03f4ca9d5ee89253b65fbf7
152 changes: 91 additions & 61 deletions cosmos/operators/local.py
Original file line number Diff line number Diff line change
@@ -406,6 +406,82 @@ def _read_run_sql_from_target_dir(self, tmp_project_dir: str, sql_context: dict[
sql_content: str = sql_file.read()
return sql_content

def _clone_project(self, tmp_dir_path: Path) -> None:
    """Mirror the dbt project into a writable temporary directory using symlinks."""
    source_dir = Path(self.project_dir)
    self.log.info(
        "Cloning project to writable temp directory %s from %s",
        tmp_dir_path,
        self.project_dir,
    )
    create_symlinks(source_dir, tmp_dir_path, self.install_deps)

def _handle_partial_parse(self, tmp_dir_path: Path) -> None:
if self.cache_dir is None:
return
latest_partial_parse = cache._get_latest_partial_parse(Path(self.project_dir), self.cache_dir)
self.log.info("Partial parse is enabled and the latest partial parse file is %s", latest_partial_parse)
if latest_partial_parse is not None:
cache._copy_partial_parse_to_project(latest_partial_parse, tmp_dir_path)

def _generate_dbt_flags(self, tmp_project_dir: str, profile_path: Path) -> list[str]:
return [
"--project-dir",
str(tmp_project_dir),
"--profiles-dir",
str(profile_path.parent),
"--profile",
self.profile_config.profile_name,
"--target",
self.profile_config.target_name,
]

def _install_dependencies(
self, tmp_dir_path: Path, flags: list[str], env: dict[str, str | bytes | os.PathLike[Any]]
) -> None:
self._cache_package_lockfile(tmp_dir_path)
deps_command = [self.dbt_executable_path, "deps"] + flags
self.invoke_dbt(command=deps_command, env=env, cwd=tmp_dir_path)

@staticmethod
def _mock_dbt_adapter(async_context: dict[str, Any] | None) -> None:
    """Validate the async context and patch the dbt adapter for the requested profile type.

    Raises CosmosValueError when the context is missing, incomplete, or names a
    profile type without a registered mock-adapter callable.
    """
    if not async_context:
        raise CosmosValueError("`async_context` is necessary for running the model asynchronously")
    # Both keys are mandatory for async execution; error messages match the per-key originals.
    for required_key in ("async_operator", "profile_type"):
        if required_key not in async_context:
            raise CosmosValueError(f"`{required_key}` needs to be specified in `async_context` when running as async")
    profile_type = async_context["profile_type"]
    if profile_type not in PROFILE_TYPE_MOCK_ADAPTER_CALLABLE_MAP:
        raise CosmosValueError(f"Mock adapter callable function not available for profile_type {profile_type}")
    PROFILE_TYPE_MOCK_ADAPTER_CALLABLE_MAP[profile_type]()

def _handle_datasets(self, context: Context) -> None:
    """Collect the run's input/output datasets and register them with Airflow."""
    inlets, outlets = self.get_datasets("inputs"), self.get_datasets("outputs")
    self.log.info("Inlets: %s", inlets)
    self.log.info("Outlets: %s", outlets)
    self.register_dataset(inlets, outlets, context)

def _update_partial_parse_cache(self, tmp_dir_path: Path) -> None:
if self.cache_dir is None:
return
partial_parse_file = get_partial_parse_path(tmp_dir_path)
if partial_parse_file.exists():
cache._update_partial_parse_cache(partial_parse_file, self.cache_dir)

def _handle_post_execution(self, tmp_project_dir: str, context: Context) -> None:
    """Store run artifacts (freshness, compiled SQL) and invoke the user callback, if configured."""
    for store_artifact in (self.store_freshness_json, self.store_compiled_sql, self.upload_compiled_sql):
        store_artifact(tmp_project_dir, context)
    if self.callback:
        # The callback receives the execution context alongside any user-supplied args.
        self.callback_args.update({"context": context})
        self.callback(tmp_project_dir, **self.callback_args)

def _handle_async_execution(self, tmp_project_dir: str, context: Context, async_context: dict[str, Any]) -> None:
    """Read the compiled run SQL, attach it via the profile-specific hook, and delegate to the async operator."""
    compiled_sql = self._read_run_sql_from_target_dir(tmp_project_dir, async_context)
    associate_args = PROFILE_TYPE_ASSOCIATE_ARGS_CALLABLE_MAP[async_context["profile_type"]]
    associate_args(self, sql=compiled_sql)
    async_context["async_operator"].execute(self, context)

def run_command(
self,
cmd: list[str],
@@ -422,60 +498,27 @@ def run_command(

with tempfile.TemporaryDirectory() as tmp_project_dir:

self.log.info(
"Cloning project to writable temp directory %s from %s",
tmp_project_dir,
self.project_dir,
)
tmp_dir_path = Path(tmp_project_dir)
env = {k: str(v) for k, v in env.items()}
create_symlinks(Path(self.project_dir), tmp_dir_path, self.install_deps)
self._clone_project(tmp_dir_path)

if self.partial_parse and self.cache_dir is not None:
latest_partial_parse = cache._get_latest_partial_parse(Path(self.project_dir), self.cache_dir)
self.log.info("Partial parse is enabled and the latest partial parse file is %s", latest_partial_parse)
if latest_partial_parse is not None:
cache._copy_partial_parse_to_project(latest_partial_parse, tmp_dir_path)
if self.partial_parse:
self._handle_partial_parse(tmp_dir_path)

with self.profile_config.ensure_profile() as profile_values:
(profile_path, env_vars) = profile_values
env.update(env_vars)
self.log.debug("Using environment variables keys: %s", env.keys())

flags = [
"--project-dir",
str(tmp_project_dir),
"--profiles-dir",
str(profile_path.parent),
"--profile",
self.profile_config.profile_name,
"--target",
self.profile_config.target_name,
]
flags = self._generate_dbt_flags(tmp_project_dir, profile_path)

if self.install_deps:
self._cache_package_lockfile(tmp_dir_path)
deps_command = [self.dbt_executable_path, "deps"]
deps_command.extend(flags)
self.invoke_dbt(
command=deps_command,
env=env,
cwd=tmp_project_dir,
)

full_cmd = cmd + flags
self._install_dependencies(tmp_dir_path, flags, env)

self.log.debug("Using environment variables keys: %s", env.keys())
if run_as_async:
if not async_context:
raise CosmosValueError("async_context is necessary for running the model asynchronously.")
profile_type = async_context["profile_type"]
mock_adapter_callable = PROFILE_TYPE_MOCK_ADAPTER_CALLABLE_MAP.get(profile_type)
if not mock_adapter_callable:
raise CosmosValueError(
f"Mock adapter callable function not available for profile_type {profile_type}"
)
mock_adapter_callable()
self._mock_dbt_adapter(async_context)

full_cmd = cmd + flags
result = self.invoke_dbt(
command=full_cmd,
env=env,
@@ -488,29 +531,16 @@ def run_command(
].openlineage_events_completes = self.openlineage_events_completes # type: ignore

if self.emit_datasets:
inlets = self.get_datasets("inputs")
outlets = self.get_datasets("outputs")
self.log.info("Inlets: %s", inlets)
self.log.info("Outlets: %s", outlets)
self.register_dataset(inlets, outlets, context)

if self.partial_parse and self.cache_dir:
partial_parse_file = get_partial_parse_path(tmp_dir_path)
if partial_parse_file.exists():
cache._update_partial_parse_cache(partial_parse_file, self.cache_dir)

self.store_freshness_json(tmp_project_dir, context)
self.store_compiled_sql(tmp_project_dir, context)
self.upload_compiled_sql(tmp_project_dir, context)
if self.callback:
self.callback_args.update({"context": context})
self.callback(tmp_project_dir, **self.callback_args)
self._handle_datasets(context)

if self.partial_parse:
self._update_partial_parse_cache(tmp_dir_path)

self._handle_post_execution(tmp_project_dir, context)
self.handle_exception(result)

if run_as_async and async_context:
sql = self._read_run_sql_from_target_dir(tmp_project_dir, async_context)
PROFILE_TYPE_ASSOCIATE_ARGS_CALLABLE_MAP[profile_type](self, sql=sql)
async_context["async_operator"].execute(self, context)
self._handle_async_execution(tmp_project_dir, context, async_context)

return result